Merge branch 'dev' into mill

commit 3ea6d3b139

530 changed files with 16423 additions and 4585 deletions
@@ -79,6 +79,7 @@ components: &components
- homeassistant/components/group/**
- homeassistant/components/hassio/**
- homeassistant/components/homeassistant/**
- homeassistant/components/homeassistant_hardware/**
- homeassistant/components/http/**
- homeassistant/components/image/**
- homeassistant/components/input_boolean/**
.github/workflows/builder.yml (2 changes)

@@ -531,7 +531,7 @@ jobs:
- name: Generate artifact attestation
if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
uses: actions/attest-build-provenance@1c608d11d69870c2092266b3f9a6f3abbf17002c # v1.4.3
uses: actions/attest-build-provenance@ef244123eb79f2f7a7e75d99086184180e6d0018 # v1.4.4
with:
subject-name: ${{ env.HASSFEST_IMAGE_NAME }}
subject-digest: ${{ steps.push.outputs.digest }}
.github/workflows/ci.yaml (22 changes)

@@ -42,7 +42,7 @@ env:
MYPY_CACHE_VERSION: 9
HA_SHORT_VERSION: "2024.12"
DEFAULT_PYTHON: "3.12"
ALL_PYTHON_VERSIONS: "['3.12']"
ALL_PYTHON_VERSIONS: "['3.12', '3.13']"
# 10.3 is the oldest supported version
# - 10.3.32 is the version currently shipped with Synology (as of 17 Feb 2022)
# 10.6 is the current long-term-support
@@ -622,13 +622,13 @@ jobs:
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.3.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
python-version: ${{ matrix.python-version }}
check-latest: true
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.1.2
with:
@@ -819,11 +819,7 @@ jobs:
needs:
- info
- base
strategy:
fail-fast: false
matrix:
python-version: ${{ fromJson(needs.info.outputs.python_versions) }}
name: Split tests for full run Python ${{ matrix.python-version }}
name: Split tests for full run
steps:
- name: Install additional OS dependencies
run: |
@@ -836,11 +832,11 @@ jobs:
libgammu-dev
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ matrix.python-version }}
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.3.0
with:
python-version: ${{ matrix.python-version }}
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
@@ -858,7 +854,7 @@ jobs:
- name: Upload pytest_buckets
uses: actions/upload-artifact@v4.4.3
with:
name: pytest_buckets-${{ matrix.python-version }}
name: pytest_buckets
path: pytest_buckets.txt
overwrite: true
@@ -923,7 +919,7 @@ jobs:
- name: Download pytest_buckets
uses: actions/download-artifact@v4.1.8
with:
name: pytest_buckets-${{ matrix.python-version }}
name: pytest_buckets
- name: Compile English translations
run: |
. venv/bin/activate
.github/workflows/codeql.yml (4 changes)

@@ -24,11 +24,11 @@ jobs:
uses: actions/checkout@v4.2.2

- name: Initialize CodeQL
uses: github/codeql-action/init@v3.27.0
uses: github/codeql-action/init@v3.27.3
with:
languages: python

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3.27.0
uses: github/codeql-action/analyze@v3.27.3
with:
category: "/language:python"
.github/workflows/wheels.yml (30 changes)

@@ -112,7 +112,7 @@ jobs:
strategy:
fail-fast: false
matrix:
abi: ["cp312"]
abi: ["cp312", "cp313"]
arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps:
- name: Checkout the repository
@@ -135,14 +135,14 @@ jobs:
sed -i "/uv/d" requirements_diff.txt

- name: Build wheels
uses: home-assistant/wheels@2024.07.1
uses: home-assistant/wheels@2024.11.0
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2
arch: ${{ matrix.arch }}
wheels-key: ${{ secrets.WHEELS_KEY }}
env-file: true
apk: "libffi-dev;openssl-dev;yaml-dev;nasm"
apk: "libffi-dev;openssl-dev;yaml-dev;nasm;zlib-dev"
skip-binary: aiohttp;multidict;yarl
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"
@@ -156,7 +156,7 @@ jobs:
strategy:
fail-fast: false
matrix:
abi: ["cp312"]
abi: ["cp312", "cp313"]
arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps:
- name: Checkout the repository
@@ -198,6 +198,7 @@ jobs:
split -l $(expr $(expr $(cat requirements_all.txt | wc -l) + 1) / 3) requirements_all_wheels_${{ matrix.arch }}.txt requirements_all.txt

- name: Create requirements for cython<3
if: matrix.abi == 'cp312'
run: |
# Some dependencies still require 'cython<3'
# and don't yet use isolated build environments.
@@ -208,7 +209,8 @@ jobs:
cat homeassistant/package_constraints.txt | grep 'pydantic==' >> requirements_old-cython.txt

- name: Build wheels (old cython)
uses: home-assistant/wheels@2024.07.1
uses: home-assistant/wheels@2024.11.0
if: matrix.abi == 'cp312'
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2
@@ -223,43 +225,43 @@
pip: "'cython<3'"
|
||||
|
||||
- name: Build wheels (part 1)
|
||||
uses: home-assistant/wheels@2024.07.1
|
||||
uses: home-assistant/wheels@2024.11.0
|
||||
with:
|
||||
abi: ${{ matrix.abi }}
|
||||
tag: musllinux_1_2
|
||||
arch: ${{ matrix.arch }}
|
||||
wheels-key: ${{ secrets.WHEELS_KEY }}
|
||||
env-file: true
|
||||
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm"
|
||||
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pydantic;pymicro-vad;yarl
|
||||
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev"
|
||||
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
|
||||
constraints: "homeassistant/package_constraints.txt"
|
||||
requirements-diff: "requirements_diff.txt"
|
||||
requirements: "requirements_all.txtaa"
|
||||
|
||||
- name: Build wheels (part 2)
|
||||
uses: home-assistant/wheels@2024.07.1
|
||||
uses: home-assistant/wheels@2024.11.0
|
||||
with:
|
||||
abi: ${{ matrix.abi }}
|
||||
tag: musllinux_1_2
|
||||
arch: ${{ matrix.arch }}
|
||||
wheels-key: ${{ secrets.WHEELS_KEY }}
|
||||
env-file: true
|
||||
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm"
|
||||
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pydantic;pymicro-vad;yarl
|
||||
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev"
|
||||
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
|
||||
constraints: "homeassistant/package_constraints.txt"
|
||||
requirements-diff: "requirements_diff.txt"
|
||||
requirements: "requirements_all.txtab"
|
||||
|
||||
- name: Build wheels (part 3)
|
||||
uses: home-assistant/wheels@2024.07.1
|
||||
uses: home-assistant/wheels@2024.11.0
|
||||
with:
|
||||
abi: ${{ matrix.abi }}
|
||||
tag: musllinux_1_2
|
||||
arch: ${{ matrix.arch }}
|
||||
wheels-key: ${{ secrets.WHEELS_KEY }}
|
||||
env-file: true
|
||||
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm"
|
||||
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pydantic;pymicro-vad;yarl
|
||||
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev"
|
||||
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
|
||||
constraints: "homeassistant/package_constraints.txt"
|
||||
requirements-diff: "requirements_diff.txt"
|
||||
requirements: "requirements_all.txtac"
|
||||
@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.7.2
rev: v0.7.3
hooks:
- id: ruff
args:
@@ -90,7 +90,7 @@ repos:
pass_filenames: false
language: script
types: [text]
files: ^(script/hassfest/metadata\.py|homeassistant/const\.py$|pyproject\.toml)$
files: ^(script/hassfest/metadata\.py|homeassistant/const\.py$|pyproject\.toml|homeassistant/components/go2rtc/const\.py)$
- id: hassfest-mypy-config
name: hassfest-mypy-config
entry: script/run-in-env.sh python3 -m script.hassfest -p mypy_config
@@ -330,6 +330,7 @@ homeassistant.components.mysensors.*
homeassistant.components.myuplink.*
homeassistant.components.nam.*
homeassistant.components.nanoleaf.*
homeassistant.components.nasweb.*
homeassistant.components.neato.*
homeassistant.components.nest.*
homeassistant.components.netatmo.*

@@ -339,6 +340,7 @@ homeassistant.components.nfandroidtv.*
homeassistant.components.nightscout.*
homeassistant.components.nissan_leaf.*
homeassistant.components.no_ip.*
homeassistant.components.nordpool.*
homeassistant.components.notify.*
homeassistant.components.notion.*
homeassistant.components.number.*
@@ -970,6 +970,8 @@ build.json @home-assistant/supervisor
/tests/components/nam/ @bieniu
/homeassistant/components/nanoleaf/ @milanmeu @joostlek
/tests/components/nanoleaf/ @milanmeu @joostlek
/homeassistant/components/nasweb/ @nasWebio
/tests/components/nasweb/ @nasWebio
/homeassistant/components/neato/ @Santobert
/tests/components/neato/ @Santobert
/homeassistant/components/nederlandse_spoorwegen/ @YarmoM

@@ -1010,6 +1012,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/noaa_tides/ @jdelaney72
/homeassistant/components/nobo_hub/ @echoromeo @oyvindwe
/tests/components/nobo_hub/ @echoromeo @oyvindwe
/homeassistant/components/nordpool/ @gjohansson-ST
/tests/components/nordpool/ @gjohansson-ST
/homeassistant/components/notify/ @home-assistant/core
/tests/components/notify/ @home-assistant/core
/homeassistant/components/notify_events/ @matrozov @papajojo

@@ -1340,6 +1344,8 @@ build.json @home-assistant/supervisor
/tests/components/siren/ @home-assistant/core @raman325
/homeassistant/components/sisyphus/ @jkeljo
/homeassistant/components/sky_hub/ @rogerselwyn
/homeassistant/components/sky_remote/ @dunnmj @saty9
/tests/components/sky_remote/ @dunnmj @saty9
/homeassistant/components/skybell/ @tkdrob
/tests/components/skybell/ @tkdrob
/homeassistant/components/slack/ @tkdrob @fletcherau
@@ -13,7 +13,7 @@ ENV \
ARG QEMU_CPU

# Install uv
RUN pip3 install uv==0.4.28
RUN pip3 install uv==0.5.0

WORKDIR /usr/src

@@ -55,7 +55,7 @@ RUN \
"armv7") go2rtc_suffix='arm' ;; \
*) go2rtc_suffix=${BUILD_ARCH} ;; \
esac \
&& curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.6/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
&& curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.7/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
&& chmod +x /bin/go2rtc \
# Verify go2rtc can be executed
&& go2rtc --version
@@ -35,6 +35,9 @@ RUN \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*

# Add go2rtc binary
COPY --from=ghcr.io/alexxit/go2rtc:latest /usr/local/bin/go2rtc /bin/go2rtc

# Install uv
RUN pip3 install uv
homeassistant/brands/sky.json (new file, 5 lines)

@@ -0,0 +1,5 @@
{
"domain": "sky",
"name": "Sky",
"integrations": ["sky_hub", "sky_remote"]
}
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/agent_dvr",
"iot_class": "local_polling",
"loggers": ["agent"],
"requirements": ["agent-py==0.0.23"]
"requirements": ["agent-py==0.0.24"]
}
|
@ -6,7 +6,7 @@ import asyncio
|
|||
from datetime import timedelta
|
||||
from functools import partial
|
||||
import logging
|
||||
from typing import Any, Final, final
|
||||
from typing import TYPE_CHECKING, Any, Final, final
|
||||
|
||||
from propcache import cached_property
|
||||
import voluptuous as vol
|
||||
|
@ -221,9 +221,15 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A
|
|||
@property
|
||||
def state(self) -> str | None:
|
||||
"""Return the current state."""
|
||||
if (alarm_state := self.alarm_state) is None:
|
||||
return None
|
||||
return alarm_state
|
||||
if (alarm_state := self.alarm_state) is not None:
|
||||
return alarm_state
|
||||
if self._attr_state is not None:
|
||||
# Backwards compatibility for integrations that set state directly
|
||||
# Should be removed in 2025.11
|
||||
if TYPE_CHECKING:
|
||||
assert isinstance(self._attr_state, str)
|
||||
return self._attr_state
|
||||
return None
|
||||
|
||||
@cached_property
|
||||
def alarm_state(self) -> AlarmControlPanelState | None:
|
||||
|
|
|
@ -32,7 +32,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
|||
|
||||
async def async_handle_create_service(call: ServiceCall) -> None:
|
||||
"""Service handler for creating backups."""
|
||||
await backup_manager.async_create_backup()
|
||||
await backup_manager.async_create_backup(on_progress=None)
|
||||
if backup_task := backup_manager.backup_task:
|
||||
await backup_task
|
||||
|
||||
hass.services.async_register(DOMAIN, "create", async_handle_create_service)
|
||||
|
||||
|
|
|
@ -2,23 +2,26 @@
|
|||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from http import HTTPStatus
|
||||
from typing import cast
|
||||
|
||||
from aiohttp import BodyPartReader
|
||||
from aiohttp.hdrs import CONTENT_DISPOSITION
|
||||
from aiohttp.web import FileResponse, Request, Response
|
||||
|
||||
from homeassistant.components.http import KEY_HASS, HomeAssistantView
|
||||
from homeassistant.components.http import KEY_HASS, HomeAssistantView, require_admin
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.util import slugify
|
||||
|
||||
from .const import DOMAIN
|
||||
from .manager import BaseBackupManager
|
||||
from .const import DATA_MANAGER
|
||||
|
||||
|
||||
@callback
|
||||
def async_register_http_views(hass: HomeAssistant) -> None:
|
||||
"""Register the http views."""
|
||||
hass.http.register_view(DownloadBackupView)
|
||||
hass.http.register_view(UploadBackupView)
|
||||
|
||||
|
||||
class DownloadBackupView(HomeAssistantView):
|
||||
|
@ -36,7 +39,7 @@ class DownloadBackupView(HomeAssistantView):
|
|||
if not request["hass_user"].is_admin:
|
||||
return Response(status=HTTPStatus.UNAUTHORIZED)
|
||||
|
||||
manager: BaseBackupManager = request.app[KEY_HASS].data[DOMAIN]
|
||||
manager = request.app[KEY_HASS].data[DATA_MANAGER]
|
||||
backup = await manager.async_get_backup(slug=slug)
|
||||
|
||||
if backup is None or not backup.path.exists():
|
||||
|
@ -48,3 +51,29 @@ class DownloadBackupView(HomeAssistantView):
|
|||
CONTENT_DISPOSITION: f"attachment; filename={slugify(backup.name)}.tar"
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
class UploadBackupView(HomeAssistantView):
|
||||
"""Generate backup view."""
|
||||
|
||||
url = "/api/backup/upload"
|
||||
name = "api:backup:upload"
|
||||
|
||||
@require_admin
|
||||
async def post(self, request: Request) -> Response:
|
||||
"""Upload a backup file."""
|
||||
manager = request.app[KEY_HASS].data[DATA_MANAGER]
|
||||
reader = await request.multipart()
|
||||
contents = cast(BodyPartReader, await reader.next())
|
||||
|
||||
try:
|
||||
await manager.async_receive_backup(contents=contents)
|
||||
except OSError as err:
|
||||
return Response(
|
||||
body=f"Can't write backup file {err}",
|
||||
status=HTTPStatus.INTERNAL_SERVER_ERROR,
|
||||
)
|
||||
except asyncio.CancelledError:
|
||||
return Response(status=HTTPStatus.INTERNAL_SERVER_ERROR)
|
||||
|
||||
return Response(status=HTTPStatus.CREATED)
|
||||
|
|
|
@ -4,16 +4,21 @@ from __future__ import annotations
|
|||
|
||||
import abc
|
||||
import asyncio
|
||||
from collections.abc import Callable
|
||||
from dataclasses import asdict, dataclass
|
||||
import hashlib
|
||||
import io
|
||||
import json
|
||||
from pathlib import Path
|
||||
from queue import SimpleQueue
|
||||
import shutil
|
||||
import tarfile
|
||||
from tarfile import TarError
|
||||
from tempfile import TemporaryDirectory
|
||||
import time
|
||||
from typing import Any, Protocol, cast
|
||||
|
||||
import aiohttp
|
||||
from securetar import SecureTarFile, atomic_contents_add
|
||||
|
||||
from homeassistant.backup_restore import RESTORE_BACKUP_FILE
|
||||
|
@ -30,6 +35,13 @@ from .const import DOMAIN, EXCLUDE_FROM_BACKUP, LOGGER
|
|||
BUF_SIZE = 2**20 * 4 # 4MB
|
||||
|
||||
|
||||
@dataclass(slots=True)
|
||||
class NewBackup:
|
||||
"""New backup class."""
|
||||
|
||||
slug: str
|
||||
|
||||
|
||||
@dataclass(slots=True)
|
||||
class Backup:
|
||||
"""Backup class."""
|
||||
|
@ -45,6 +57,15 @@ class Backup:
|
|||
return {**asdict(self), "path": self.path.as_posix()}
|
||||
|
||||
|
||||
@dataclass(slots=True)
|
||||
class BackupProgress:
|
||||
"""Backup progress class."""
|
||||
|
||||
done: bool
|
||||
stage: str | None
|
||||
success: bool | None
|
||||
|
||||
|
||||
class BackupPlatformProtocol(Protocol):
|
||||
"""Define the format that backup platforms can have."""
|
||||
|
||||
|
@ -61,7 +82,7 @@ class BaseBackupManager(abc.ABC):
|
|||
def __init__(self, hass: HomeAssistant) -> None:
|
||||
"""Initialize the backup manager."""
|
||||
self.hass = hass
|
||||
self.backing_up = False
|
||||
self.backup_task: asyncio.Task | None = None
|
||||
self.backups: dict[str, Backup] = {}
|
||||
self.loaded_platforms = False
|
||||
self.platforms: dict[str, BackupPlatformProtocol] = {}
|
||||
|
@ -126,10 +147,15 @@ class BaseBackupManager(abc.ABC):
|
|||
|
||||
@abc.abstractmethod
|
||||
async def async_restore_backup(self, slug: str, **kwargs: Any) -> None:
|
||||
"""Restpre a backup."""
|
||||
"""Restore a backup."""
|
||||
|
||||
@abc.abstractmethod
|
||||
async def async_create_backup(self, **kwargs: Any) -> Backup:
|
||||
async def async_create_backup(
|
||||
self,
|
||||
*,
|
||||
on_progress: Callable[[BackupProgress], None] | None,
|
||||
**kwargs: Any,
|
||||
) -> NewBackup:
|
||||
"""Generate a backup."""
|
||||
|
||||
@abc.abstractmethod
|
||||
|
@ -147,6 +173,15 @@ class BaseBackupManager(abc.ABC):
|
|||
async def async_remove_backup(self, *, slug: str, **kwargs: Any) -> None:
|
||||
"""Remove a backup."""
|
||||
|
||||
@abc.abstractmethod
|
||||
async def async_receive_backup(
|
||||
self,
|
||||
*,
|
||||
contents: aiohttp.BodyPartReader,
|
||||
**kwargs: Any,
|
||||
) -> None:
|
||||
"""Receive and store a backup file from upload."""
|
||||
|
||||
|
||||
class BackupManager(BaseBackupManager):
|
||||
"""Backup manager for the Backup integration."""
|
||||
|
@ -222,17 +257,93 @@ class BackupManager(BaseBackupManager):
|
|||
LOGGER.debug("Removed backup located at %s", backup.path)
|
||||
self.backups.pop(slug)
|
||||
|
||||
async def async_create_backup(self, **kwargs: Any) -> Backup:
|
||||
"""Generate a backup."""
|
||||
if self.backing_up:
|
||||
raise HomeAssistantError("Backup already in progress")
|
||||
async def async_receive_backup(
|
||||
self,
|
||||
*,
|
||||
contents: aiohttp.BodyPartReader,
|
||||
**kwargs: Any,
|
||||
) -> None:
|
||||
"""Receive and store a backup file from upload."""
|
||||
queue: SimpleQueue[tuple[bytes, asyncio.Future[None] | None] | None] = (
|
||||
SimpleQueue()
|
||||
)
|
||||
temp_dir_handler = await self.hass.async_add_executor_job(TemporaryDirectory)
|
||||
target_temp_file = Path(
|
||||
temp_dir_handler.name, contents.filename or "backup.tar"
|
||||
)
|
||||
|
||||
def _sync_queue_consumer() -> None:
|
||||
with target_temp_file.open("wb") as file_handle:
|
||||
while True:
|
||||
if (_chunk_future := queue.get()) is None:
|
||||
break
|
||||
_chunk, _future = _chunk_future
|
||||
if _future is not None:
|
||||
self.hass.loop.call_soon_threadsafe(_future.set_result, None)
|
||||
file_handle.write(_chunk)
|
||||
|
||||
fut: asyncio.Future[None] | None = None
|
||||
try:
|
||||
fut = self.hass.async_add_executor_job(_sync_queue_consumer)
|
||||
megabytes_sending = 0
|
||||
while chunk := await contents.read_chunk(BUF_SIZE):
|
||||
megabytes_sending += 1
|
||||
if megabytes_sending % 5 != 0:
|
||||
queue.put_nowait((chunk, None))
|
||||
continue
|
||||
|
||||
chunk_future = self.hass.loop.create_future()
|
||||
queue.put_nowait((chunk, chunk_future))
|
||||
await asyncio.wait(
|
||||
(fut, chunk_future),
|
||||
return_when=asyncio.FIRST_COMPLETED,
|
||||
)
|
||||
if fut.done():
|
||||
# The executor job failed
|
||||
break
|
||||
|
||||
queue.put_nowait(None) # terminate queue consumer
|
||||
finally:
|
||||
if fut is not None:
|
||||
await fut
|
||||
|
||||
def _move_and_cleanup() -> None:
|
||||
shutil.move(target_temp_file, self.backup_dir / target_temp_file.name)
|
||||
temp_dir_handler.cleanup()
|
||||
|
||||
await self.hass.async_add_executor_job(_move_and_cleanup)
|
||||
await self.load_backups()
|
||||
|
||||
async def async_create_backup(
|
||||
self,
|
||||
*,
|
||||
on_progress: Callable[[BackupProgress], None] | None,
|
||||
**kwargs: Any,
|
||||
) -> NewBackup:
|
||||
"""Generate a backup."""
|
||||
if self.backup_task:
|
||||
raise HomeAssistantError("Backup already in progress")
|
||||
backup_name = f"Core {HAVERSION}"
|
||||
date_str = dt_util.now().isoformat()
|
||||
slug = _generate_slug(date_str, backup_name)
|
||||
self.backup_task = self.hass.async_create_task(
|
||||
self._async_create_backup(backup_name, date_str, slug, on_progress),
|
||||
name="backup_manager_create_backup",
|
||||
eager_start=False, # To ensure the task is not started before we return
|
||||
)
|
||||
return NewBackup(slug=slug)
|
||||
|
||||
async def _async_create_backup(
|
||||
self,
|
||||
backup_name: str,
|
||||
date_str: str,
|
||||
slug: str,
|
||||
on_progress: Callable[[BackupProgress], None] | None,
|
||||
) -> Backup:
|
||||
"""Generate a backup."""
|
||||
success = False
|
||||
try:
|
||||
self.backing_up = True
|
||||
await self.async_pre_backup_actions()
|
||||
backup_name = f"Core {HAVERSION}"
|
||||
date_str = dt_util.now().isoformat()
|
||||
slug = _generate_slug(date_str, backup_name)
|
||||
|
||||
backup_data = {
|
||||
"slug": slug,
|
||||
|
@ -259,9 +370,12 @@ class BackupManager(BaseBackupManager):
|
|||
if self.loaded_backups:
|
||||
self.backups[slug] = backup
|
||||
LOGGER.debug("Generated new backup with slug %s", slug)
|
||||
success = True
|
||||
return backup
|
||||
finally:
|
||||
self.backing_up = False
|
||||
if on_progress:
|
||||
on_progress(BackupProgress(done=True, stage=None, success=success))
|
||||
self.backup_task = None
|
||||
await self.async_post_backup_actions()
|
||||
|
||||
def _mkdir_and_generate_backup_contents(
|
||||
|
|
|
@ -8,6 +8,7 @@ from homeassistant.components import websocket_api
|
|||
from homeassistant.core import HomeAssistant, callback
|
||||
|
||||
from .const import DATA_MANAGER, LOGGER
|
||||
from .manager import BackupProgress
|
||||
|
||||
|
||||
@callback
|
||||
|
@ -40,7 +41,7 @@ async def handle_info(
|
|||
msg["id"],
|
||||
{
|
||||
"backups": list(backups.values()),
|
||||
"backing_up": manager.backing_up,
|
||||
"backing_up": manager.backup_task is not None,
|
||||
},
|
||||
)
|
||||
|
||||
|
@ -113,7 +114,11 @@ async def handle_create(
|
|||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
"""Generate a backup."""
|
||||
backup = await hass.data[DATA_MANAGER].async_create_backup()
|
||||
|
||||
def on_progress(progress: BackupProgress) -> None:
|
||||
connection.send_message(websocket_api.event_message(msg["id"], progress))
|
||||
|
||||
backup = await hass.data[DATA_MANAGER].async_create_backup(on_progress=on_progress)
|
||||
connection.send_result(msg["id"], backup)
|
||||
|
||||
|
||||
|
@ -127,7 +132,6 @@ async def handle_backup_start(
|
|||
) -> None:
|
||||
"""Backup start notification."""
|
||||
manager = hass.data[DATA_MANAGER]
|
||||
manager.backing_up = True
|
||||
LOGGER.debug("Backup start notification")
|
||||
|
||||
try:
|
||||
|
@ -149,7 +153,6 @@ async def handle_backup_end(
|
|||
) -> None:
|
||||
"""Backup end notification."""
|
||||
manager = hass.data[DATA_MANAGER]
|
||||
manager.backing_up = False
|
||||
LOGGER.debug("Backup end notification")
|
||||
|
||||
try:
|
||||
|
|
|
@ -17,46 +17,9 @@ from homeassistant.components.media_player import (
|
|||
class BangOlufsenSource:
|
||||
"""Class used for associating device source ids with friendly names. May not include all sources."""
|
||||
|
||||
URI_STREAMER: Final[Source] = Source(
|
||||
name="Audio Streamer",
|
||||
id="uriStreamer",
|
||||
is_seekable=False,
|
||||
)
|
||||
BLUETOOTH: Final[Source] = Source(
|
||||
name="Bluetooth",
|
||||
id="bluetooth",
|
||||
is_seekable=False,
|
||||
)
|
||||
CHROMECAST: Final[Source] = Source(
|
||||
name="Chromecast built-in",
|
||||
id="chromeCast",
|
||||
is_seekable=False,
|
||||
)
|
||||
LINE_IN: Final[Source] = Source(
|
||||
name="Line-In",
|
||||
id="lineIn",
|
||||
is_seekable=False,
|
||||
)
|
||||
SPDIF: Final[Source] = Source(
|
||||
name="Optical",
|
||||
id="spdif",
|
||||
is_seekable=False,
|
||||
)
|
||||
NET_RADIO: Final[Source] = Source(
|
||||
name="B&O Radio",
|
||||
id="netRadio",
|
||||
is_seekable=False,
|
||||
)
|
||||
DEEZER: Final[Source] = Source(
|
||||
name="Deezer",
|
||||
id="deezer",
|
||||
is_seekable=True,
|
||||
)
|
||||
TIDAL: Final[Source] = Source(
|
||||
name="Tidal",
|
||||
id="tidal",
|
||||
is_seekable=True,
|
||||
)
|
||||
LINE_IN: Final[Source] = Source(name="Line-In", id="lineIn")
|
||||
SPDIF: Final[Source] = Source(name="Optical", id="spdif")
|
||||
URI_STREAMER: Final[Source] = Source(name="Audio Streamer", id="uriStreamer")
|
||||
|
||||
|
||||
BANG_OLUFSEN_STATES: dict[str, MediaPlayerState] = {
|
||||
|
@ -170,20 +133,6 @@ VALID_MEDIA_TYPES: Final[tuple] = (
|
|||
MediaType.CHANNEL,
|
||||
)
|
||||
|
||||
# Sources on the device that should not be selectable by the user
|
||||
HIDDEN_SOURCE_IDS: Final[tuple] = (
|
||||
"airPlay",
|
||||
"bluetooth",
|
||||
"chromeCast",
|
||||
"generator",
|
||||
"local",
|
||||
"dlna",
|
||||
"qplay",
|
||||
"wpl",
|
||||
"pl",
|
||||
"beolink",
|
||||
"usbIn",
|
||||
)
|
||||
|
||||
# Fallback sources to use in case of API failure.
|
||||
FALLBACK_SOURCES: Final[SourceArray] = SourceArray(
|
||||
|
@ -191,7 +140,7 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray(
|
|||
Source(
|
||||
id="uriStreamer",
|
||||
is_enabled=True,
|
||||
is_playable=False,
|
||||
is_playable=True,
|
||||
name="Audio Streamer",
|
||||
type=SourceTypeEnum(value="uriStreamer"),
|
||||
is_seekable=False,
|
||||
|
@ -199,7 +148,7 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray(
|
|||
Source(
|
||||
id="bluetooth",
|
||||
is_enabled=True,
|
||||
is_playable=False,
|
||||
is_playable=True,
|
||||
name="Bluetooth",
|
||||
type=SourceTypeEnum(value="bluetooth"),
|
||||
is_seekable=False,
|
||||
|
@ -207,7 +156,7 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray(
|
|||
Source(
|
||||
id="spotify",
|
||||
is_enabled=True,
|
||||
is_playable=False,
|
||||
is_playable=True,
|
||||
name="Spotify Connect",
|
||||
type=SourceTypeEnum(value="spotify"),
|
||||
is_seekable=True,
|
||||
|
|
homeassistant/components/bang_olufsen/icons.json (new file, 9 lines)

@@ -0,0 +1,9 @@
{
"services": {
"beolink_join": { "service": "mdi:location-enter" },
"beolink_expand": { "service": "mdi:location-enter" },
"beolink_unexpand": { "service": "mdi:location-exit" },
"beolink_leave": { "service": "mdi:close-circle-outline" },
"beolink_allstandby": { "service": "mdi:close-circle-multiple-outline" }
}
}
@ -11,7 +11,7 @@ from typing import TYPE_CHECKING, Any, cast
|
|||
|
||||
from aiohttp import ClientConnectorError
|
||||
from mozart_api import __version__ as MOZART_API_VERSION
|
||||
from mozart_api.exceptions import ApiException
|
||||
from mozart_api.exceptions import ApiException, NotFoundException
|
||||
from mozart_api.models import (
|
||||
Action,
|
||||
Art,
|
||||
|
@ -38,6 +38,7 @@ from mozart_api.models import (
|
|||
VolumeState,
|
||||
)
|
||||
from mozart_api.mozart_client import MozartClient, get_highest_resolution_artwork
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import media_source
|
||||
from homeassistant.components.media_player import (
|
||||
|
@ -55,10 +56,17 @@ from homeassistant.config_entries import ConfigEntry
|
|||
from homeassistant.const import CONF_MODEL, Platform
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
from homeassistant.helpers import (
|
||||
config_validation as cv,
|
||||
device_registry as dr,
|
||||
entity_registry as er,
|
||||
)
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.entity_platform import (
|
||||
AddEntitiesCallback,
|
||||
async_get_current_platform,
|
||||
)
|
||||
from homeassistant.util.dt import utcnow
|
||||
|
||||
from . import BangOlufsenConfigEntry
|
||||
|
@ -70,7 +78,6 @@ from .const import (
|
|||
CONNECTION_STATUS,
|
||||
DOMAIN,
|
||||
FALLBACK_SOURCES,
|
||||
HIDDEN_SOURCE_IDS,
|
||||
VALID_MEDIA_TYPES,
|
||||
BangOlufsenMediaType,
|
||||
BangOlufsenSource,
|
||||
|
@ -117,6 +124,58 @@ async def async_setup_entry(
|
|||
]
|
||||
)
|
||||
|
||||
# Register actions.
|
||||
platform = async_get_current_platform()
|
||||
|
||||
jid_regex = vol.Match(
|
||||
r"(^\d{4})[.](\d{7})[.](\d{8})(@products\.bang-olufsen\.com)$"
|
||||
)
|
||||
|
||||
platform.async_register_entity_service(
|
||||
name="beolink_join",
|
||||
schema={vol.Optional("beolink_jid"): jid_regex},
|
||||
func="async_beolink_join",
|
||||
)
|
||||
|
||||
platform.async_register_entity_service(
|
||||
name="beolink_expand",
|
||||
schema={
|
||||
vol.Exclusive("all_discovered", "devices", ""): cv.boolean,
|
||||
vol.Exclusive(
|
||||
"beolink_jids",
|
||||
"devices",
|
||||
"Define either specific Beolink JIDs or all discovered",
|
||||
): vol.All(
|
||||
cv.ensure_list,
|
||||
[jid_regex],
|
||||
),
|
||||
},
|
||||
func="async_beolink_expand",
|
||||
)
|
||||
|
||||
platform.async_register_entity_service(
|
||||
name="beolink_unexpand",
|
||||
schema={
|
||||
vol.Required("beolink_jids"): vol.All(
|
||||
cv.ensure_list,
|
||||
[jid_regex],
|
||||
),
|
||||
},
|
||||
func="async_beolink_unexpand",
|
||||
)
|
||||
|
||||
platform.async_register_entity_service(
|
||||
name="beolink_leave",
|
||||
schema=None,
|
||||
func="async_beolink_leave",
|
||||
)
|
||||
|
||||
platform.async_register_entity_service(
|
||||
name="beolink_allstandby",
|
||||
schema=None,
|
||||
func="async_beolink_allstandby",
|
||||
)
|
||||
|
||||
|
||||
class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
||||
"""Representation of a media player."""
|
||||
|
@ -157,6 +216,8 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
|||
# Beolink compatible sources
|
||||
self._beolink_sources: dict[str, bool] = {}
|
||||
self._remote_leader: BeolinkLeader | None = None
|
||||
# Extra state attributes for showing Beolink: peer(s), listener(s), leader and self
|
||||
self._beolink_attributes: dict[str, dict[str, dict[str, str]]] = {}
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Turn on the dispatchers."""
|
||||
|
@ -166,9 +227,11 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
|||
CONNECTION_STATUS: self._async_update_connection_state,
|
||||
WebsocketNotification.ACTIVE_LISTENING_MODE: self._async_update_sound_modes,
|
||||
WebsocketNotification.BEOLINK: self._async_update_beolink,
|
||||
WebsocketNotification.CONFIGURATION: self._async_update_name_and_beolink,
|
||||
WebsocketNotification.PLAYBACK_ERROR: self._async_update_playback_error,
|
||||
WebsocketNotification.PLAYBACK_METADATA: self._async_update_playback_metadata_and_beolink,
|
||||
WebsocketNotification.PLAYBACK_PROGRESS: self._async_update_playback_progress,
|
||||
WebsocketNotification.PLAYBACK_SOURCE: self._async_update_sources,
|
||||
WebsocketNotification.PLAYBACK_STATE: self._async_update_playback_state,
|
||||
WebsocketNotification.REMOTE_MENU_CHANGED: self._async_update_sources,
|
||||
WebsocketNotification.SOURCE_CHANGE: self._async_update_source_change,
|
||||
|
@ -230,6 +293,9 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
|||
|
||||
await self._async_update_sound_modes()
|
||||
|
||||
# Update beolink attributes and device name.
|
||||
await self._async_update_name_and_beolink()
|
||||
|
||||
async def async_update(self) -> None:
|
||||
"""Update queue settings."""
|
||||
# The WebSocket event listener is the main handler for connection state.
|
||||
|
@ -243,7 +309,7 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
|||
if queue_settings.shuffle is not None:
|
||||
self._attr_shuffle = queue_settings.shuffle
|
||||
|
||||
async def _async_update_sources(self) -> None:
|
||||
async def _async_update_sources(self, _: Source | None = None) -> None:
|
||||
"""Get sources for the specific product."""
|
||||
|
||||
# Audio sources
|
||||
|
@ -270,10 +336,7 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
|||
self._audio_sources = {
|
||||
source.id: source.name
|
||||
for source in cast(list[Source], sources.items)
|
||||
if source.is_enabled
|
||||
and source.id
|
||||
and source.name
|
||||
and source.id not in HIDDEN_SOURCE_IDS
|
||||
if source.is_enabled and source.id and source.name and source.is_playable
|
||||
}
|
||||
|
||||
# Some sources are not Beolink expandable, meaning that they can't be joined by
|
||||
|
@ -375,9 +438,44 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
|||
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def _async_update_name_and_beolink(self) -> None:
|
||||
"""Update the device friendly name."""
|
||||
beolink_self = await self._client.get_beolink_self()
|
||||
|
||||
# Update device name
|
||||
device_registry = dr.async_get(self.hass)
|
||||
assert self.device_entry is not None
|
||||
|
||||
device_registry.async_update_device(
|
||||
device_id=self.device_entry.id,
|
||||
name=beolink_self.friendly_name,
|
||||
)
|
||||
|
||||
await self._async_update_beolink()
|
||||
|
||||
async def _async_update_beolink(self) -> None:
|
||||
"""Update the current Beolink leader, listeners, peers and self."""
|
||||
|
||||
self._beolink_attributes = {}
|
||||
|
||||
assert self.device_entry is not None
|
||||
assert self.device_entry.name is not None
|
||||
|
||||
# Add Beolink self
|
||||
self._beolink_attributes = {
|
||||
"beolink": {"self": {self.device_entry.name: self._beolink_jid}}
|
||||
}
|
||||
|
||||
# Add Beolink peers
|
||||
peers = await self._client.get_beolink_peers()
|
||||
|
||||
if len(peers) > 0:
|
||||
self._beolink_attributes["beolink"]["peers"] = {}
|
||||
for peer in peers:
|
||||
self._beolink_attributes["beolink"]["peers"][peer.friendly_name] = (
|
||||
peer.jid
|
||||
)
|
||||
|
||||
# Add Beolink listeners / leader
|
||||
self._remote_leader = self._playback_metadata.remote_leader
|
||||
|
||||
|
@ -397,9 +495,14 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
|||
# Add self
|
||||
group_members.append(self.entity_id)
|
||||
|
||||
self._beolink_attributes["beolink"]["leader"] = {
|
||||
self._remote_leader.friendly_name: self._remote_leader.jid,
|
||||
}
|
||||
|
||||
# If not listener, check if leader.
|
||||
else:
|
||||
beolink_listeners = await self._client.get_beolink_listeners()
|
||||
beolink_listeners_attribute = {}
|
||||
|
||||
# Check if the device is a leader.
|
||||
if len(beolink_listeners) > 0:
|
||||
|
@ -420,6 +523,18 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
|||
for beolink_listener in beolink_listeners
|
||||
]
|
||||
)
|
||||
# Update Beolink attributes
|
||||
for beolink_listener in beolink_listeners:
|
||||
for peer in peers:
|
||||
if peer.jid == beolink_listener.jid:
|
||||
# Get the friendly names for the listeners from the peers
|
||||
beolink_listeners_attribute[peer.friendly_name] = (
|
||||
beolink_listener.jid
|
||||
)
|
||||
break
|
||||
self._beolink_attributes["beolink"]["listeners"] = (
|
||||
beolink_listeners_attribute
|
||||
)
|
||||
|
||||
self._attr_group_members = group_members
|
||||
|
||||
|
@ -573,38 +688,19 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
|||
@property
|
||||
def source(self) -> str | None:
|
||||
"""Return the current audio source."""
|
||||
|
||||
# Try to fix some of the source_change chromecast weirdness.
|
||||
if hasattr(self._playback_metadata, "title"):
|
||||
# source_change is chromecast but line in is selected.
|
||||
if self._playback_metadata.title == BangOlufsenSource.LINE_IN.name:
|
||||
return BangOlufsenSource.LINE_IN.name
|
||||
|
||||
# source_change is chromecast but bluetooth is selected.
|
||||
if self._playback_metadata.title == BangOlufsenSource.BLUETOOTH.name:
|
||||
return BangOlufsenSource.BLUETOOTH.name
|
||||
|
||||
# source_change is line in, bluetooth or optical but stale metadata is sent through the WebSocket,
|
||||
# And the source has not changed.
|
||||
if self._source_change.id in (
|
||||
BangOlufsenSource.BLUETOOTH.id,
|
||||
BangOlufsenSource.LINE_IN.id,
|
||||
BangOlufsenSource.SPDIF.id,
|
||||
):
|
||||
return BangOlufsenSource.CHROMECAST.name
|
||||
|
||||
# source_change is chromecast and there is metadata but no artwork. Bluetooth does support metadata but not artwork
|
||||
# So i assume that it is bluetooth and not chromecast
|
||||
if (
|
||||
hasattr(self._playback_metadata, "art")
|
||||
and self._playback_metadata.art is not None
|
||||
and len(self._playback_metadata.art) == 0
|
||||
and self._source_change.id == BangOlufsenSource.CHROMECAST.id
|
||||
):
|
||||
return BangOlufsenSource.BLUETOOTH.name
|
||||
|
||||
return self._source_change.name
|
||||
|
||||
@property
|
||||
def extra_state_attributes(self) -> dict[str, Any] | None:
|
||||
"""Return information that is not returned anywhere else."""
|
||||
attributes: dict[str, Any] = {}
|
||||
|
||||
# Add Beolink attributes
|
||||
if self._beolink_attributes:
|
||||
attributes.update(self._beolink_attributes)
|
||||
|
||||
return attributes
|
||||
|
||||
async def async_turn_off(self) -> None:
|
||||
"""Set the device to "networkStandby"."""
|
||||
await self._client.post_standby()
|
||||
|
@ -876,23 +972,30 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
|||
# Beolink compatible B&O device.
|
||||
# Repeated presses / calls will cycle between compatible playing devices.
|
||||
if len(group_members) == 0:
|
||||
await self._async_beolink_join()
|
||||
await self.async_beolink_join()
|
||||
return
|
||||
|
||||
# Get JID for each group member
|
||||
jids = [self._get_beolink_jid(group_member) for group_member in group_members]
|
||||
await self._async_beolink_expand(jids)
|
||||
await self.async_beolink_expand(jids)
|
||||
|
||||
async def async_unjoin_player(self) -> None:
|
||||
"""Unjoin Beolink session. End session if leader."""
|
||||
await self._async_beolink_leave()
|
||||
await self.async_beolink_leave()
|
||||
|
||||
async def _async_beolink_join(self) -> None:
|
||||
# Custom actions:
|
||||
async def async_beolink_join(self, beolink_jid: str | None = None) -> None:
|
||||
"""Join a Beolink multi-room experience."""
|
||||
await self._client.join_latest_beolink_experience()
|
||||
if beolink_jid is None:
|
||||
await self._client.join_latest_beolink_experience()
|
||||
else:
|
||||
await self._client.join_beolink_peer(jid=beolink_jid)
|
||||
|
||||
async def _async_beolink_expand(self, beolink_jids: list[str]) -> None:
|
||||
async def async_beolink_expand(
|
||||
self, beolink_jids: list[str] | None = None, all_discovered: bool = False
|
||||
) -> None:
|
||||
"""Expand a Beolink multi-room experience with a device or devices."""
|
||||
|
||||
# Ensure that the current source is expandable
|
||||
if not self._beolink_sources[cast(str, self._source_change.id)]:
|
||||
raise ServiceValidationError(
|
||||
|
@ -904,10 +1007,37 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
|||
},
|
||||
)
|
||||
|
||||
# Try to expand to all defined devices
|
||||
for beolink_jid in beolink_jids:
|
||||
await self._client.post_beolink_expand(jid=beolink_jid)
|
||||
# Expand to all discovered devices
|
||||
if all_discovered:
|
||||
peers = await self._client.get_beolink_peers()
|
||||
|
||||
async def _async_beolink_leave(self) -> None:
|
||||
for peer in peers:
|
||||
try:
|
||||
await self._client.post_beolink_expand(jid=peer.jid)
|
||||
except NotFoundException:
|
||||
_LOGGER.warning("Unable to expand to %s", peer.jid)
|
||||
|
||||
# Try to expand to all defined devices
|
||||
elif beolink_jids:
|
||||
for beolink_jid in beolink_jids:
|
||||
try:
|
||||
await self._client.post_beolink_expand(jid=beolink_jid)
|
||||
except NotFoundException:
|
||||
_LOGGER.warning(
|
||||
"Unable to expand to %s. Is the device available on the network?",
|
||||
beolink_jid,
|
||||
)
|
||||
|
||||
async def async_beolink_unexpand(self, beolink_jids: list[str]) -> None:
|
||||
"""Unexpand a Beolink multi-room experience with a device or devices."""
|
||||
# Unexpand all defined devices
|
||||
for beolink_jid in beolink_jids:
|
||||
await self._client.post_beolink_unexpand(jid=beolink_jid)
|
||||
|
||||
async def async_beolink_leave(self) -> None:
|
||||
"""Leave the current Beolink experience."""
|
||||
await self._client.post_beolink_leave()
|
||||
|
||||
async def async_beolink_allstandby(self) -> None:
|
||||
"""Set all connected Beolink devices to standby."""
|
||||
await self._client.post_beolink_allstandby()
|
||||
|
|
homeassistant/components/bang_olufsen/services.yaml (new file, 79 lines)

@@ -0,0 +1,79 @@
beolink_allstandby:
|
||||
target:
|
||||
entity:
|
||||
integration: bang_olufsen
|
||||
domain: media_player
|
||||
device:
|
||||
integration: bang_olufsen
|
||||
|
||||
beolink_expand:
|
||||
target:
|
||||
entity:
|
||||
integration: bang_olufsen
|
||||
domain: media_player
|
||||
device:
|
||||
integration: bang_olufsen
|
||||
fields:
|
||||
all_discovered:
|
||||
required: false
|
||||
example: false
|
||||
selector:
|
||||
boolean:
|
||||
jid_options:
|
||||
collapsed: false
|
||||
fields:
|
||||
beolink_jids:
|
||||
required: false
|
||||
example: >-
|
||||
[
|
||||
1111.2222222.33333333@products.bang-olufsen.com,
|
||||
4444.5555555.66666666@products.bang-olufsen.com
|
||||
]
|
||||
selector:
|
||||
object:
|
||||
|
||||
beolink_join:
|
||||
target:
|
||||
entity:
|
||||
integration: bang_olufsen
|
||||
domain: media_player
|
||||
device:
|
||||
integration: bang_olufsen
|
||||
fields:
|
||||
jid_options:
|
||||
collapsed: false
|
||||
fields:
|
||||
beolink_jid:
|
||||
required: false
|
||||
example: 1111.2222222.33333333@products.bang-olufsen.com
|
||||
selector:
|
||||
text:
|
||||
|
||||
beolink_leave:
|
||||
target:
|
||||
entity:
|
||||
integration: bang_olufsen
|
||||
domain: media_player
|
||||
device:
|
||||
integration: bang_olufsen
|
||||
|
||||
beolink_unexpand:
|
||||
target:
|
||||
entity:
|
||||
integration: bang_olufsen
|
||||
domain: media_player
|
||||
device:
|
||||
integration: bang_olufsen
|
||||
fields:
|
||||
jid_options:
|
||||
collapsed: false
|
||||
fields:
|
||||
beolink_jids:
|
||||
required: true
|
||||
example: >-
|
||||
[
|
||||
1111.2222222.33333333@products.bang-olufsen.com,
|
||||
4444.5555555.66666666@products.bang-olufsen.com
|
||||
]
|
||||
selector:
|
||||
object:
|
|
@ -1,4 +1,8 @@
|
|||
{
|
||||
"common": {
|
||||
"jid_options_name": "JID options",
|
||||
"jid_options_description": "Advanced grouping options, where devices' unique Beolink IDs (Called JIDs) are used directly. JIDs can be found in the state attributes of the media player entity."
|
||||
},
|
||||
"config": {
|
||||
"error": {
|
||||
"api_exception": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
|
@ -25,6 +29,68 @@
|
|||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"beolink_allstandby": {
|
||||
"name": "Beolink all standby",
|
||||
"description": "Set all Connected Beolink devices to standby."
|
||||
},
|
||||
"beolink_expand": {
|
||||
"name": "Beolink expand",
|
||||
"description": "Expand current Beolink experience.",
|
||||
"fields": {
|
||||
"all_discovered": {
|
||||
"name": "All discovered",
|
||||
"description": "Expand Beolink experience to all discovered devices."
|
||||
},
|
||||
"beolink_jids": {
|
||||
"name": "Beolink JIDs",
|
||||
"description": "Specify which Beolink JIDs will join current Beolink experience."
|
||||
}
|
||||
},
|
||||
"sections": {
|
||||
"jid_options": {
|
||||
"name": "[%key:component::bang_olufsen::common::jid_options_name%]",
|
||||
"description": "[%key:component::bang_olufsen::common::jid_options_description%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
"beolink_join": {
|
||||
"name": "Beolink join",
|
||||
"description": "Join a Beolink experience.",
|
||||
"fields": {
|
||||
"beolink_jid": {
|
||||
"name": "Beolink JID",
|
||||
"description": "Manually specify Beolink JID to join."
|
||||
}
|
||||
},
|
||||
"sections": {
|
||||
"jid_options": {
|
||||
"name": "[%key:component::bang_olufsen::common::jid_options_name%]",
|
||||
"description": "[%key:component::bang_olufsen::common::jid_options_description%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
"beolink_leave": {
|
||||
"name": "Beolink leave",
|
||||
"description": "Leave a Beolink experience."
|
||||
},
|
||||
"beolink_unexpand": {
|
||||
"name": "Beolink unexpand",
|
||||
"description": "Unexpand from current Beolink experience.",
|
||||
"fields": {
|
||||
"beolink_jids": {
|
||||
"name": "Beolink JIDs",
|
||||
"description": "Specify which Beolink JIDs will leave from current Beolink experience."
|
||||
}
|
||||
},
|
||||
"sections": {
|
||||
"jid_options": {
|
||||
"name": "[%key:component::bang_olufsen::common::jid_options_name%]",
|
||||
"description": "[%key:component::bang_olufsen::common::jid_options_description%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"m3u_invalid_format": {
|
||||
"message": "Media sources with the .m3u extension are not supported."
|
||||
|
|
|
@ -63,6 +63,9 @@ class BangOlufsenWebsocket(BangOlufsenBase):
|
|||
self._client.get_playback_progress_notifications(
|
||||
self.on_playback_progress_notification
|
||||
)
|
||||
self._client.get_playback_source_notifications(
|
||||
self.on_playback_source_notification
|
||||
)
|
||||
self._client.get_playback_state_notifications(
|
||||
self.on_playback_state_notification
|
||||
)
|
||||
|
@ -117,6 +120,11 @@ class BangOlufsenWebsocket(BangOlufsenBase):
|
|||
self.hass,
|
||||
f"{self._unique_id}_{WebsocketNotification.BEOLINK}",
|
||||
)
|
||||
elif notification_type is WebsocketNotification.CONFIGURATION:
|
||||
async_dispatcher_send(
|
||||
self.hass,
|
||||
f"{self._unique_id}_{WebsocketNotification.CONFIGURATION}",
|
||||
)
|
||||
elif notification_type is WebsocketNotification.REMOTE_MENU_CHANGED:
|
||||
async_dispatcher_send(
|
||||
self.hass,
|
||||
|
@ -157,6 +165,14 @@ class BangOlufsenWebsocket(BangOlufsenBase):
|
|||
notification,
|
||||
)
|
||||
|
||||
def on_playback_source_notification(self, notification: Source) -> None:
|
||||
"""Send playback_source dispatch."""
|
||||
async_dispatcher_send(
|
||||
self.hass,
|
||||
f"{self._unique_id}_{WebsocketNotification.PLAYBACK_SOURCE}",
|
||||
notification,
|
||||
)
|
||||
|
||||
def on_source_change_notification(self, notification: Source) -> None:
|
||||
"""Send source_change dispatch."""
|
||||
async_dispatcher_send(
|
||||
|
|
|
@ -10,7 +10,11 @@ from homeassistant.components.sensor import (
|
|||
SensorEntityDescription,
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.const import EntityCategory, UnitOfTemperature
|
||||
from homeassistant.const import (
|
||||
SIGNAL_STRENGTH_DECIBELS_MILLIWATT,
|
||||
EntityCategory,
|
||||
UnitOfTemperature,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
|
@ -32,6 +36,8 @@ SENSOR_TYPES: tuple[SensorEntityDescription, ...] = (
|
|||
SensorEntityDescription(
|
||||
key=TYPE_WIFI_STRENGTH,
|
||||
translation_key="wifi_strength",
|
||||
native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS_MILLIWATT,
|
||||
device_class=SensorDeviceClass.SIGNAL_STRENGTH,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
|
|
|
@ -364,12 +364,13 @@ class BluesoundPlayer(MediaPlayerEntity):
|
|||
if self.is_grouped and not self.is_master:
|
||||
return MediaPlayerState.IDLE
|
||||
|
||||
status = self._status.state
|
||||
if status in ("pause", "stop"):
|
||||
return MediaPlayerState.PAUSED
|
||||
if status in ("stream", "play"):
|
||||
return MediaPlayerState.PLAYING
|
||||
return MediaPlayerState.IDLE
|
||||
match self._status.state:
|
||||
case "pause":
|
||||
return MediaPlayerState.PAUSED
|
||||
case "stream" | "play":
|
||||
return MediaPlayerState.PLAYING
|
||||
case _:
|
||||
return MediaPlayerState.IDLE
|
||||
|
||||
@property
|
||||
def media_title(self) -> str | None:
|
||||
|
@ -769,7 +770,7 @@ class BluesoundPlayer(MediaPlayerEntity):
|
|||
|
||||
async def async_set_volume_level(self, volume: float) -> None:
|
||||
"""Send volume_up command to media player."""
|
||||
volume = int(volume * 100)
|
||||
volume = int(round(volume * 100))
|
||||
volume = min(100, volume)
|
||||
volume = max(0, volume)
|
||||
|
||||
|
|
|
@ -16,7 +16,8 @@
|
|||
"list_access": {
|
||||
"default": "mdi:account-lock",
|
||||
"state": {
|
||||
"shared": "mdi:account-group"
|
||||
"shared": "mdi:account-group",
|
||||
"invitation": "mdi:account-multiple-plus"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
|
|
@ -79,7 +79,7 @@ SENSOR_DESCRIPTIONS: tuple[BringSensorEntityDescription, ...] = (
|
|||
translation_key=BringSensor.LIST_ACCESS,
|
||||
value_fn=lambda lst, _: lst["status"].lower(),
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
options=["registered", "shared"],
|
||||
options=["registered", "shared", "invitation"],
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
),
|
||||
)
|
||||
|
|
|
@ -66,7 +66,8 @@
|
|||
"name": "List access",
|
||||
"state": {
|
||||
"registered": "Private",
|
||||
"shared": "Shared"
|
||||
"shared": "Shared",
|
||||
"invitation": "Invitation pending"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -421,8 +421,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
|||
if hass.config.webrtc.ice_servers:
|
||||
return hass.config.webrtc.ice_servers
|
||||
return [
|
||||
RTCIceServer(urls="stun:stun.home-assistant.io:80"),
|
||||
RTCIceServer(urls="stun:stun.home-assistant.io:3478"),
|
||||
RTCIceServer(
|
||||
urls=[
|
||||
"stun:stun.home-assistant.io:80",
|
||||
"stun:stun.home-assistant.io:3478",
|
||||
]
|
||||
),
|
||||
]
|
||||
|
||||
async_register_ice_servers(hass, get_ice_servers)
|
||||
|
@ -472,6 +476,8 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
|||
_attr_state: None = None # State is determined by is_on
|
||||
_attr_supported_features: CameraEntityFeature = CameraEntityFeature(0)
|
||||
|
||||
__supports_stream: CameraEntityFeature | None = None
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize a camera."""
|
||||
self._cache: dict[str, Any] = {}
|
||||
|
@ -783,6 +789,9 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
|||
async def async_internal_added_to_hass(self) -> None:
|
||||
"""Run when entity about to be added to hass."""
|
||||
await super().async_internal_added_to_hass()
|
||||
self.__supports_stream = (
|
||||
self.supported_features_compat & CameraEntityFeature.STREAM
|
||||
)
|
||||
await self.async_refresh_providers(write_state=False)
|
||||
|
||||
async def async_refresh_providers(self, *, write_state: bool = True) -> None:
|
||||
|
@ -848,7 +857,10 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
|||
]
|
||||
config.configuration.ice_servers.extend(ice_servers)
|
||||
|
||||
config.get_candidates_upfront = self._legacy_webrtc_provider is not None
|
||||
config.get_candidates_upfront = (
|
||||
self._supports_native_sync_webrtc
|
||||
or self._legacy_webrtc_provider is not None
|
||||
)
|
||||
|
||||
return config
|
||||
|
||||
|
@@ -889,6 +901,21 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
return CameraCapabilities(frontend_stream_types)

@callback
def async_write_ha_state(self) -> None:
"""Write the state to the state machine.

Schedules async_refresh_providers if support of streams have changed.
"""
super().async_write_ha_state()
if self.__supports_stream != (
supports_stream := self.supported_features_compat
& CameraEntityFeature.STREAM
):
self.__supports_stream = supports_stream
self._invalidate_camera_capabilities_cache()
self.hass.async_create_task(self.async_refresh_providers())

class CameraView(HomeAssistantView):
"""Base CameraView."""
@@ -440,16 +440,16 @@ def validate_language_voice(value: tuple[str, str]) -> tuple[str, str]:
@websocket_api.websocket_command(
{
vol.Required("type"): "cloud/update_prefs",
vol.Optional(PREF_ENABLE_GOOGLE): bool,
vol.Optional(PREF_ENABLE_ALEXA): bool,
vol.Optional(PREF_ALEXA_REPORT_STATE): bool,
vol.Optional(PREF_ENABLE_ALEXA): bool,
vol.Optional(PREF_ENABLE_CLOUD_ICE_SERVERS): bool,
vol.Optional(PREF_ENABLE_GOOGLE): bool,
vol.Optional(PREF_GOOGLE_REPORT_STATE): bool,
vol.Optional(PREF_GOOGLE_SECURE_DEVICES_PIN): vol.Any(None, str),
vol.Optional(PREF_REMOTE_ALLOW_REMOTE_ENABLE): bool,
vol.Optional(PREF_TTS_DEFAULT_VOICE): vol.All(
vol.Coerce(tuple), validate_language_voice
),
vol.Optional(PREF_REMOTE_ALLOW_REMOTE_ENABLE): bool,
vol.Optional(PREF_ENABLE_CLOUD_ICE_SERVERS): bool,
}
)
@websocket_api.async_response
@ -163,21 +163,21 @@ class CloudPreferences:
|
|||
async def async_update(
|
||||
self,
|
||||
*,
|
||||
google_enabled: bool | UndefinedType = UNDEFINED,
|
||||
alexa_enabled: bool | UndefinedType = UNDEFINED,
|
||||
remote_enabled: bool | UndefinedType = UNDEFINED,
|
||||
google_secure_devices_pin: str | None | UndefinedType = UNDEFINED,
|
||||
cloudhooks: dict[str, dict[str, str | bool]] | UndefinedType = UNDEFINED,
|
||||
cloud_user: str | UndefinedType = UNDEFINED,
|
||||
alexa_report_state: bool | UndefinedType = UNDEFINED,
|
||||
google_report_state: bool | UndefinedType = UNDEFINED,
|
||||
tts_default_voice: tuple[str, str] | UndefinedType = UNDEFINED,
|
||||
remote_domain: str | None | UndefinedType = UNDEFINED,
|
||||
alexa_settings_version: int | UndefinedType = UNDEFINED,
|
||||
google_settings_version: int | UndefinedType = UNDEFINED,
|
||||
google_connected: bool | UndefinedType = UNDEFINED,
|
||||
remote_allow_remote_enable: bool | UndefinedType = UNDEFINED,
|
||||
cloud_ice_servers_enabled: bool | UndefinedType = UNDEFINED,
|
||||
cloud_user: str | UndefinedType = UNDEFINED,
|
||||
cloudhooks: dict[str, dict[str, str | bool]] | UndefinedType = UNDEFINED,
|
||||
google_connected: bool | UndefinedType = UNDEFINED,
|
||||
google_enabled: bool | UndefinedType = UNDEFINED,
|
||||
google_report_state: bool | UndefinedType = UNDEFINED,
|
||||
google_secure_devices_pin: str | None | UndefinedType = UNDEFINED,
|
||||
google_settings_version: int | UndefinedType = UNDEFINED,
|
||||
remote_allow_remote_enable: bool | UndefinedType = UNDEFINED,
|
||||
remote_domain: str | None | UndefinedType = UNDEFINED,
|
||||
remote_enabled: bool | UndefinedType = UNDEFINED,
|
||||
tts_default_voice: tuple[str, str] | UndefinedType = UNDEFINED,
|
||||
) -> None:
|
||||
"""Update user preferences."""
|
||||
prefs = {**self._prefs}
|
||||
|
@ -186,21 +186,21 @@ class CloudPreferences:
|
|||
{
|
||||
key: value
|
||||
for key, value in (
|
||||
(PREF_ENABLE_GOOGLE, google_enabled),
|
||||
(PREF_ENABLE_ALEXA, alexa_enabled),
|
||||
(PREF_ENABLE_REMOTE, remote_enabled),
|
||||
(PREF_GOOGLE_SECURE_DEVICES_PIN, google_secure_devices_pin),
|
||||
(PREF_CLOUDHOOKS, cloudhooks),
|
||||
(PREF_CLOUD_USER, cloud_user),
|
||||
(PREF_ALEXA_REPORT_STATE, alexa_report_state),
|
||||
(PREF_GOOGLE_REPORT_STATE, google_report_state),
|
||||
(PREF_ALEXA_SETTINGS_VERSION, alexa_settings_version),
|
||||
(PREF_GOOGLE_SETTINGS_VERSION, google_settings_version),
|
||||
(PREF_TTS_DEFAULT_VOICE, tts_default_voice),
|
||||
(PREF_REMOTE_DOMAIN, remote_domain),
|
||||
(PREF_GOOGLE_CONNECTED, google_connected),
|
||||
(PREF_REMOTE_ALLOW_REMOTE_ENABLE, remote_allow_remote_enable),
|
||||
(PREF_CLOUD_USER, cloud_user),
|
||||
(PREF_CLOUDHOOKS, cloudhooks),
|
||||
(PREF_ENABLE_ALEXA, alexa_enabled),
|
||||
(PREF_ENABLE_CLOUD_ICE_SERVERS, cloud_ice_servers_enabled),
|
||||
(PREF_ENABLE_GOOGLE, google_enabled),
|
||||
(PREF_ENABLE_REMOTE, remote_enabled),
|
||||
(PREF_GOOGLE_CONNECTED, google_connected),
|
||||
(PREF_GOOGLE_REPORT_STATE, google_report_state),
|
||||
(PREF_GOOGLE_SECURE_DEVICES_PIN, google_secure_devices_pin),
|
||||
(PREF_GOOGLE_SETTINGS_VERSION, google_settings_version),
|
||||
(PREF_REMOTE_ALLOW_REMOTE_ENABLE, remote_allow_remote_enable),
|
||||
(PREF_REMOTE_DOMAIN, remote_domain),
|
||||
(PREF_TTS_DEFAULT_VOICE, tts_default_voice),
|
||||
)
|
||||
if value is not UNDEFINED
|
||||
}
|
||||
|
@@ -242,6 +242,7 @@ class CloudPreferences:
PREF_ALEXA_REPORT_STATE: self.alexa_report_state,
PREF_CLOUDHOOKS: self.cloudhooks,
PREF_ENABLE_ALEXA: self.alexa_enabled,
PREF_ENABLE_CLOUD_ICE_SERVERS: self.cloud_ice_servers_enabled,
PREF_ENABLE_GOOGLE: self.google_enabled,
PREF_ENABLE_REMOTE: self.remote_enabled,
PREF_GOOGLE_DEFAULT_EXPOSE: self.google_default_expose,

@@ -249,7 +250,6 @@ class CloudPreferences:
PREF_GOOGLE_SECURE_DEVICES_PIN: self.google_secure_devices_pin,
PREF_REMOTE_ALLOW_REMOTE_ENABLE: self.remote_allow_remote_enable,
PREF_TTS_DEFAULT_VOICE: self.tts_default_voice,
PREF_ENABLE_CLOUD_ICE_SERVERS: self.cloud_ice_servers_enabled,
}

@property
@@ -168,7 +168,7 @@ class ElectricityMapsConfigFlow(ConfigFlow, domain=DOMAIN):
)

return self.async_create_entry(
title=get_extra_name(data) or "CO2 Signal",
title=get_extra_name(data) or "Electricity Maps",
data=data,
)
@@ -4,5 +4,5 @@
"codeowners": ["@Petro31"],
"documentation": "https://www.home-assistant.io/integrations/compensation",
"iot_class": "calculated",
"requirements": ["numpy==1.26.4"]
"requirements": ["numpy==2.1.3"]
}
@@ -294,7 +294,7 @@ class DefaultAgent(ConversationEntity):
self.hass, language, DOMAIN, [DOMAIN]
)
response_text = translations.get(
f"component.{DOMAIN}.agent.done", "Done"
f"component.{DOMAIN}.conversation.agent.done", "Done"
)

response.async_set_speech(response_text)
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/conversation",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["hassil==1.7.4", "home-assistant-intents==2024.11.4"]
"requirements": ["hassil==1.7.4", "home-assistant-intents==2024.11.6"]
}
@@ -143,7 +143,7 @@ class CrownstoneConfigFlowHandler(BaseCrownstoneFlowHandler, ConfigFlow, domain=
config_entry: ConfigEntry,
) -> CrownstoneOptionsFlowHandler:
"""Return the Crownstone options."""
return CrownstoneOptionsFlowHandler()
return CrownstoneOptionsFlowHandler(config_entry)

def __init__(self) -> None:
"""Initialize the flow."""

@@ -210,9 +210,10 @@ class CrownstoneConfigFlowHandler(BaseCrownstoneFlowHandler, ConfigFlow, domain=
class CrownstoneOptionsFlowHandler(BaseCrownstoneFlowHandler, OptionsFlow):
"""Handle Crownstone options."""

def __init__(self) -> None:
def __init__(self, config_entry: ConfigEntry) -> None:
"""Initialize Crownstone options."""
super().__init__(OPTIONS_FLOW, self.async_create_new_entry)
self.options = config_entry.options.copy()

async def async_step_init(
self, user_input: dict[str, Any] | None = None
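A generic sketch of the options-flow pattern this hunk restores, where the handler receives the config entry and seeds its working options from it; the class name and step details are illustrative, not from this commit.

from typing import Any

from homeassistant.config_entries import ConfigEntry, ConfigFlowResult, OptionsFlow


class ExampleOptionsFlowHandler(OptionsFlow):
    """Illustrative handler mirroring the constructor shown above."""

    def __init__(self, config_entry: ConfigEntry) -> None:
        self.options = config_entry.options.copy()

    async def async_step_init(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        if user_input is not None:
            # Merge the submitted values into the stored options and finish.
            return self.async_create_entry(data={**self.options, **user_input})
        return self.async_show_form(step_id="init")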
@ -35,7 +35,7 @@ class DemoConfigFlow(ConfigFlow, domain=DOMAIN):
|
|||
config_entry: ConfigEntry,
|
||||
) -> OptionsFlowHandler:
|
||||
"""Get the options flow for this handler."""
|
||||
return OptionsFlowHandler()
|
||||
return OptionsFlowHandler(config_entry)
|
||||
|
||||
async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult:
|
||||
"""Set the config entry up from yaml."""
|
||||
|
@ -45,6 +45,10 @@ class DemoConfigFlow(ConfigFlow, domain=DOMAIN):
|
|||
class OptionsFlowHandler(OptionsFlow):
|
||||
"""Handle options."""
|
||||
|
||||
def __init__(self, config_entry: ConfigEntry) -> None:
|
||||
"""Initialize options flow."""
|
||||
self.options = dict(config_entry.options)
|
||||
|
||||
async def async_step_init(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
|
|
|
@@ -5,5 +5,5 @@
"documentation": "https://www.home-assistant.io/integrations/doods",
"iot_class": "local_polling",
"loggers": ["pydoods"],
"requirements": ["pydoods==1.0.2", "Pillow==10.4.0"]
"requirements": ["pydoods==1.0.2", "Pillow==11.0.0"]
}
@@ -6,9 +6,14 @@ from collections.abc import Awaitable, Callable
from dataclasses import dataclass
import logging

from homeassistant.components.number import NumberEntity, NumberEntityDescription
from homeassistant.components.number import (
NumberDeviceClass,
NumberEntity,
NumberEntityDescription,
NumberMode,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import UnitOfTime
from homeassistant.const import UnitOfTemperature, UnitOfTime
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
@ -54,21 +59,30 @@ async def async_setup_entry(
|
|||
) -> None:
|
||||
"""Set up the ecobee thermostat number entity."""
|
||||
data: EcobeeData = hass.data[DOMAIN]
|
||||
_LOGGER.debug("Adding min time ventilators numbers (if present)")
|
||||
|
||||
async_add_entities(
|
||||
assert data is not None
|
||||
|
||||
entities: list[NumberEntity] = [
|
||||
EcobeeVentilatorMinTime(data, index, numbers)
|
||||
for index, thermostat in enumerate(data.ecobee.thermostats)
|
||||
if thermostat["settings"]["ventilatorType"] != "none"
|
||||
for numbers in VENTILATOR_NUMBERS
|
||||
]
|
||||
|
||||
_LOGGER.debug("Adding compressor min temp number (if present)")
|
||||
entities.extend(
|
||||
(
|
||||
EcobeeVentilatorMinTime(data, index, numbers)
|
||||
EcobeeCompressorMinTemp(data, index)
|
||||
for index, thermostat in enumerate(data.ecobee.thermostats)
|
||||
if thermostat["settings"]["ventilatorType"] != "none"
|
||||
for numbers in VENTILATOR_NUMBERS
|
||||
),
|
||||
True,
|
||||
if thermostat["settings"]["hasHeatPump"]
|
||||
)
|
||||
)
|
||||
|
||||
async_add_entities(entities, True)
|
||||
|
||||
|
||||
class EcobeeVentilatorMinTime(EcobeeBaseEntity, NumberEntity):
|
||||
"""A number class, representing min time for an ecobee thermostat with ventilator attached."""
|
||||
"""A number class, representing min time for an ecobee thermostat with ventilator attached."""
|
||||
|
||||
entity_description: EcobeeNumberEntityDescription
|
||||
|
||||
|
@ -105,3 +119,53 @@ class EcobeeVentilatorMinTime(EcobeeBaseEntity, NumberEntity):
|
|||
"""Set new ventilator Min On Time value."""
|
||||
self.entity_description.set_fn(self.data, self.thermostat_index, int(value))
|
||||
self.update_without_throttle = True
|
||||
|
||||
|
||||
class EcobeeCompressorMinTemp(EcobeeBaseEntity, NumberEntity):
|
||||
"""Minimum outdoor temperature at which the compressor will operate.
|
||||
|
||||
This applies more to air source heat pumps than geothermal. This serves as a safety
|
||||
feature (compressors have a minimum operating temperature) as well as
|
||||
providing the ability to choose fuel in a dual-fuel system (i.e. choose between
|
||||
electrical heat pump and fossil auxiliary heat depending on Time of Use, Solar,
|
||||
etc.).
|
||||
Note that python-ecobee-api refers to this as Aux Cutover Threshold, but Ecobee
|
||||
uses Compressor Protection Min Temp.
|
||||
"""
|
||||
|
||||
_attr_device_class = NumberDeviceClass.TEMPERATURE
|
||||
_attr_has_entity_name = True
|
||||
_attr_icon = "mdi:thermometer-off"
|
||||
_attr_mode = NumberMode.BOX
|
||||
_attr_native_min_value = -25
|
||||
_attr_native_max_value = 66
|
||||
_attr_native_step = 5
|
||||
_attr_native_unit_of_measurement = UnitOfTemperature.FAHRENHEIT
|
||||
_attr_translation_key = "compressor_protection_min_temp"
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
data: EcobeeData,
|
||||
thermostat_index: int,
|
||||
) -> None:
|
||||
"""Initialize ecobee compressor min temperature."""
|
||||
super().__init__(data, thermostat_index)
|
||||
self._attr_unique_id = f"{self.base_unique_id}_compressor_protection_min_temp"
|
||||
self.update_without_throttle = False
|
||||
|
||||
async def async_update(self) -> None:
|
||||
"""Get the latest state from the thermostat."""
|
||||
if self.update_without_throttle:
|
||||
await self.data.update(no_throttle=True)
|
||||
self.update_without_throttle = False
|
||||
else:
|
||||
await self.data.update()
|
||||
|
||||
self._attr_native_value = (
|
||||
(self.thermostat["settings"]["compressorProtectionMinTemp"]) / 10
|
||||
)
|
||||
|
||||
def set_native_value(self, value: float) -> None:
|
||||
"""Set new compressor minimum temperature."""
|
||||
self.data.ecobee.set_aux_cutover_threshold(self.thermostat_index, value)
|
||||
self.update_without_throttle = True
|
||||
|
|
|
@ -33,15 +33,18 @@
|
|||
},
|
||||
"number": {
|
||||
"ventilator_min_type_home": {
|
||||
"name": "Ventilator min time home"
|
||||
"name": "Ventilator minimum time home"
|
||||
},
|
||||
"ventilator_min_type_away": {
|
||||
"name": "Ventilator min time away"
|
||||
"name": "Ventilator minimum time away"
|
||||
},
|
||||
"compressor_protection_min_temp": {
|
||||
"name": "Compressor minimum temperature"
|
||||
}
|
||||
},
|
||||
"switch": {
|
||||
"aux_heat_only": {
|
||||
"name": "Aux heat only"
|
||||
"name": "Auxiliary heat only"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
|
|
@ -31,25 +31,26 @@ async def async_setup_entry(
|
|||
"""Set up the ecobee thermostat switch entity."""
|
||||
data: EcobeeData = hass.data[DOMAIN]
|
||||
|
||||
async_add_entities(
|
||||
[
|
||||
EcobeeVentilator20MinSwitch(
|
||||
data,
|
||||
index,
|
||||
(await dt_util.async_get_time_zone(thermostat["location"]["timeZone"]))
|
||||
or dt_util.get_default_time_zone(),
|
||||
)
|
||||
entities: list[SwitchEntity] = [
|
||||
EcobeeVentilator20MinSwitch(
|
||||
data,
|
||||
index,
|
||||
(await dt_util.async_get_time_zone(thermostat["location"]["timeZone"]))
|
||||
or dt_util.get_default_time_zone(),
|
||||
)
|
||||
for index, thermostat in enumerate(data.ecobee.thermostats)
|
||||
if thermostat["settings"]["ventilatorType"] != "none"
|
||||
]
|
||||
|
||||
entities.extend(
|
||||
(
|
||||
EcobeeSwitchAuxHeatOnly(data, index)
|
||||
for index, thermostat in enumerate(data.ecobee.thermostats)
|
||||
if thermostat["settings"]["ventilatorType"] != "none"
|
||||
],
|
||||
update_before_add=True,
|
||||
if thermostat["settings"]["hasHeatPump"]
|
||||
)
|
||||
)
|
||||
|
||||
async_add_entities(
|
||||
EcobeeSwitchAuxHeatOnly(data, index)
|
||||
for index, thermostat in enumerate(data.ecobee.thermostats)
|
||||
if thermostat["settings"]["hasHeatPump"]
|
||||
)
|
||||
async_add_entities(entities, update_before_add=True)
|
||||
|
||||
|
||||
class EcobeeVentilator20MinSwitch(EcobeeBaseEntity, SwitchEntity):
|
||||
|
|
|
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/ecovacs",
"iot_class": "cloud_push",
"loggers": ["sleekxmppfs", "sucks", "deebot_client"],
"requirements": ["py-sucks==0.9.10", "deebot-client==8.4.0"]
"requirements": ["py-sucks==0.9.10", "deebot-client==8.4.1"]
}
@@ -5,8 +5,11 @@ from pyemoncms import EmoncmsClient
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_API_KEY, CONF_URL, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue

from .const import DOMAIN, EMONCMS_UUID_DOC_URL, LOGGER
from .coordinator import EmoncmsCoordinator

PLATFORMS: list[Platform] = [Platform.SENSOR]
@ -14,6 +17,49 @@ PLATFORMS: list[Platform] = [Platform.SENSOR]
|
|||
type EmonCMSConfigEntry = ConfigEntry[EmoncmsCoordinator]
|
||||
|
||||
|
||||
def _migrate_unique_id(
|
||||
hass: HomeAssistant, entry: EmonCMSConfigEntry, emoncms_unique_id: str
|
||||
) -> None:
|
||||
"""Migrate to emoncms unique id if needed."""
|
||||
ent_reg = er.async_get(hass)
|
||||
entry_entities = ent_reg.entities.get_entries_for_config_entry_id(entry.entry_id)
|
||||
for entity in entry_entities:
|
||||
if entity.unique_id.split("-")[0] == entry.entry_id:
|
||||
feed_id = entity.unique_id.split("-")[-1]
|
||||
LOGGER.debug(f"moving feed {feed_id} to hardware uuid")
|
||||
ent_reg.async_update_entity(
|
||||
entity.entity_id, new_unique_id=f"{emoncms_unique_id}-{feed_id}"
|
||||
)
|
||||
hass.config_entries.async_update_entry(
|
||||
entry,
|
||||
unique_id=emoncms_unique_id,
|
||||
)
|
||||
|
||||
|
||||
async def _check_unique_id_migration(
|
||||
hass: HomeAssistant, entry: EmonCMSConfigEntry, emoncms_client: EmoncmsClient
|
||||
) -> None:
|
||||
"""Check if we can migrate to the emoncms uuid."""
|
||||
emoncms_unique_id = await emoncms_client.async_get_uuid()
|
||||
if emoncms_unique_id:
|
||||
if entry.unique_id != emoncms_unique_id:
|
||||
_migrate_unique_id(hass, entry, emoncms_unique_id)
|
||||
else:
|
||||
async_create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
"migrate database",
|
||||
is_fixable=False,
|
||||
issue_domain=DOMAIN,
|
||||
severity=IssueSeverity.WARNING,
|
||||
translation_key="migrate_database",
|
||||
translation_placeholders={
|
||||
"url": entry.data[CONF_URL],
|
||||
"doc_url": EMONCMS_UUID_DOC_URL,
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: EmonCMSConfigEntry) -> bool:
|
||||
"""Load a config entry."""
|
||||
emoncms_client = EmoncmsClient(
|
||||
|
@ -21,6 +67,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: EmonCMSConfigEntry) -> b
|
|||
entry.data[CONF_API_KEY],
|
||||
session=async_get_clientsession(hass),
|
||||
)
|
||||
await _check_unique_id_migration(hass, entry, emoncms_client)
|
||||
coordinator = EmoncmsCoordinator(hass, emoncms_client)
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
entry.runtime_data = coordinator
|
||||
|
|
|
@ -14,7 +14,7 @@ from homeassistant.config_entries import (
|
|||
OptionsFlow,
|
||||
)
|
||||
from homeassistant.const import CONF_API_KEY, CONF_URL
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.selector import selector
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
@ -48,13 +48,10 @@ def sensor_name(url: str) -> str:
|
|||
return f"emoncms@{sensorip}"
|
||||
|
||||
|
||||
async def get_feed_list(hass: HomeAssistant, url: str, api_key: str) -> dict[str, Any]:
|
||||
async def get_feed_list(
|
||||
emoncms_client: EmoncmsClient,
|
||||
) -> dict[str, Any]:
|
||||
"""Check connection to emoncms and return feed list if successful."""
|
||||
emoncms_client = EmoncmsClient(
|
||||
url,
|
||||
api_key,
|
||||
session=async_get_clientsession(hass),
|
||||
)
|
||||
return await emoncms_client.async_request("/feed/list.json")
|
||||
|
||||
|
||||
|
@ -72,7 +69,7 @@ class EmoncmsConfigFlow(ConfigFlow, domain=DOMAIN):
|
|||
config_entry: ConfigEntry,
|
||||
) -> EmoncmsOptionsFlow:
|
||||
"""Get the options flow for this handler."""
|
||||
return EmoncmsOptionsFlow()
|
||||
return EmoncmsOptionsFlow(config_entry)
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
|
@ -82,22 +79,25 @@ class EmoncmsConfigFlow(ConfigFlow, domain=DOMAIN):
|
|||
description_placeholders = {}
|
||||
|
||||
if user_input is not None:
|
||||
self.url = user_input[CONF_URL]
|
||||
self.api_key = user_input[CONF_API_KEY]
|
||||
self._async_abort_entries_match(
|
||||
{
|
||||
CONF_API_KEY: user_input[CONF_API_KEY],
|
||||
CONF_URL: user_input[CONF_URL],
|
||||
CONF_API_KEY: self.api_key,
|
||||
CONF_URL: self.url,
|
||||
}
|
||||
)
|
||||
result = await get_feed_list(
|
||||
self.hass, user_input[CONF_URL], user_input[CONF_API_KEY]
|
||||
emoncms_client = EmoncmsClient(
|
||||
self.url, self.api_key, session=async_get_clientsession(self.hass)
|
||||
)
|
||||
result = await get_feed_list(emoncms_client)
|
||||
if not result[CONF_SUCCESS]:
|
||||
errors["base"] = "api_error"
|
||||
description_placeholders = {"details": result[CONF_MESSAGE]}
|
||||
else:
|
||||
self.include_only_feeds = user_input.get(CONF_ONLY_INCLUDE_FEEDID)
|
||||
self.url = user_input[CONF_URL]
|
||||
self.api_key = user_input[CONF_API_KEY]
|
||||
await self.async_set_unique_id(await emoncms_client.async_get_uuid())
|
||||
self._abort_if_unique_id_configured()
|
||||
options = get_options(result[CONF_MESSAGE])
|
||||
self.dropdown = {
|
||||
"options": options,
|
||||
|
@ -175,18 +175,28 @@ class EmoncmsConfigFlow(ConfigFlow, domain=DOMAIN):
|
|||
class EmoncmsOptionsFlow(OptionsFlow):
|
||||
"""Emoncms Options flow handler."""
|
||||
|
||||
def __init__(self, config_entry: ConfigEntry) -> None:
|
||||
"""Initialize emoncms options flow."""
|
||||
self._url = config_entry.data[CONF_URL]
|
||||
self._api_key = config_entry.data[CONF_API_KEY]
|
||||
|
||||
async def async_step_init(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Manage the options."""
|
||||
errors: dict[str, str] = {}
|
||||
description_placeholders = {}
|
||||
data = self.options if self.options else self.config_entry.data
|
||||
url = data[CONF_URL]
|
||||
api_key = data[CONF_API_KEY]
|
||||
include_only_feeds = data.get(CONF_ONLY_INCLUDE_FEEDID, [])
|
||||
include_only_feeds = self.config_entry.options.get(
|
||||
CONF_ONLY_INCLUDE_FEEDID,
|
||||
self.config_entry.data.get(CONF_ONLY_INCLUDE_FEEDID, []),
|
||||
)
|
||||
options: list = include_only_feeds
|
||||
result = await get_feed_list(self.hass, url, api_key)
|
||||
emoncms_client = EmoncmsClient(
|
||||
self._url,
|
||||
self._api_key,
|
||||
session=async_get_clientsession(self.hass),
|
||||
)
|
||||
result = await get_feed_list(emoncms_client)
|
||||
if not result[CONF_SUCCESS]:
|
||||
errors["base"] = "api_error"
|
||||
description_placeholders = {"details": result[CONF_MESSAGE]}
|
||||
|
@ -196,10 +206,7 @@ class EmoncmsOptionsFlow(OptionsFlow):
|
|||
if user_input:
|
||||
include_only_feeds = user_input[CONF_ONLY_INCLUDE_FEEDID]
|
||||
return self.async_create_entry(
|
||||
title=sensor_name(url),
|
||||
data={
|
||||
CONF_URL: url,
|
||||
CONF_API_KEY: api_key,
|
||||
CONF_ONLY_INCLUDE_FEEDID: include_only_feeds,
|
||||
},
|
||||
)
|
||||
|
|
|
@@ -7,6 +7,10 @@ CONF_ONLY_INCLUDE_FEEDID = "include_only_feed_id"
CONF_MESSAGE = "message"
CONF_SUCCESS = "success"
DOMAIN = "emoncms"
EMONCMS_UUID_DOC_URL = (
"https://docs.openenergymonitor.org/emoncms/update.html"
"#upgrading-to-a-version-producing-a-unique-identifier"
)
FEED_ID = "id"
FEED_NAME = "name"
FEED_TAG = "tag"
@ -138,29 +138,30 @@ async def async_setup_entry(
|
|||
async_add_entities: AddEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the emoncms sensors."""
|
||||
config = entry.options if entry.options else entry.data
|
||||
name = sensor_name(config[CONF_URL])
|
||||
exclude_feeds = config.get(CONF_EXCLUDE_FEEDID)
|
||||
include_only_feeds = config.get(CONF_ONLY_INCLUDE_FEEDID)
|
||||
name = sensor_name(entry.data[CONF_URL])
|
||||
exclude_feeds = entry.data.get(CONF_EXCLUDE_FEEDID)
|
||||
include_only_feeds = entry.options.get(
|
||||
CONF_ONLY_INCLUDE_FEEDID, entry.data.get(CONF_ONLY_INCLUDE_FEEDID)
|
||||
)
|
||||
|
||||
if exclude_feeds is None and include_only_feeds is None:
|
||||
return
|
||||
|
||||
coordinator = entry.runtime_data
|
||||
# uuid was added in emoncms database 11.5.7
|
||||
unique_id = entry.unique_id if entry.unique_id else entry.entry_id
|
||||
elems = coordinator.data
|
||||
if not elems:
|
||||
return
|
||||
|
||||
sensors: list[EmonCmsSensor] = []
|
||||
|
||||
for idx, elem in enumerate(elems):
|
||||
if include_only_feeds is not None and elem[FEED_ID] not in include_only_feeds:
|
||||
continue
|
||||
|
||||
sensors.append(
|
||||
EmonCmsSensor(
|
||||
coordinator,
|
||||
entry.entry_id,
|
||||
unique_id,
|
||||
elem["unit"],
|
||||
name,
|
||||
idx,
|
||||
|
@ -175,7 +176,7 @@ class EmonCmsSensor(CoordinatorEntity[EmoncmsCoordinator], SensorEntity):
|
|||
def __init__(
|
||||
self,
|
||||
coordinator: EmoncmsCoordinator,
|
||||
entry_id: str,
|
||||
unique_id: str,
|
||||
unit_of_measurement: str | None,
|
||||
name: str,
|
||||
idx: int,
|
||||
|
@ -188,7 +189,7 @@ class EmonCmsSensor(CoordinatorEntity[EmoncmsCoordinator], SensorEntity):
|
|||
elem = self.coordinator.data[self.idx]
|
||||
self._attr_name = f"{name} {elem[FEED_NAME]}"
|
||||
self._attr_native_unit_of_measurement = unit_of_measurement
|
||||
self._attr_unique_id = f"{entry_id}-{elem[FEED_ID]}"
|
||||
self._attr_unique_id = f"{unique_id}-{elem[FEED_ID]}"
|
||||
if unit_of_measurement in ("kWh", "Wh"):
|
||||
self._attr_device_class = SensorDeviceClass.ENERGY
|
||||
self._attr_state_class = SensorStateClass.TOTAL_INCREASING
|
||||
|
|
|
@ -19,6 +19,9 @@
|
|||
"include_only_feed_id": "Choose feeds to include"
|
||||
}
|
||||
}
|
||||
},
|
||||
"abort": {
|
||||
"already_configured": "This server is already configured"
|
||||
}
|
||||
},
|
||||
"options": {
|
||||
|
@ -41,6 +44,10 @@
|
|||
"missing_include_only_feed_id": {
|
||||
"title": "No feed synchronized with the {domain} sensor",
|
||||
"description": "Configuring {domain} using YAML is being removed.\n\nPlease add manually the feeds you want to synchronize with the `configure` button of the integration."
|
||||
},
|
||||
"migrate_database": {
|
||||
"title": "Upgrade your emoncms version",
|
||||
"description": "Your [emoncms]({url}) does not ship a unique identifier.\n\n Please upgrade to at least version 11.5.7 and migrate your emoncms database.\n\n More info on [emoncms documentation]({doc_url})"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -6,5 +6,5 @@
"iot_class": "local_push",
"loggers": ["sense_energy"],
"quality_scale": "internal",
"requirements": ["sense-energy==0.13.2"]
"requirements": ["sense-energy==0.13.3"]
}
@@ -15,17 +15,22 @@ from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.dispatcher import async_dispatcher_send

from .const import DOMAIN, SIGNAL_THERMOSTAT_CONNECTED, SIGNAL_THERMOSTAT_DISCONNECTED
from .const import SIGNAL_THERMOSTAT_CONNECTED, SIGNAL_THERMOSTAT_DISCONNECTED
from .models import Eq3Config, Eq3ConfigEntryData

PLATFORMS = [
Platform.BINARY_SENSOR,
Platform.CLIMATE,
Platform.SWITCH,
]

_LOGGER = logging.getLogger(__name__)

async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
type Eq3ConfigEntry = ConfigEntry[Eq3ConfigEntryData]

async def async_setup_entry(hass: HomeAssistant, entry: Eq3ConfigEntry) -> bool:
"""Handle config entry setup."""

mac_address: str | None = entry.unique_id
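A minimal sketch of the typed config-entry pattern introduced here (a `ConfigEntry[...]` alias plus `entry.runtime_data`); the data-class fields are illustrative stand-ins for Eq3ConfigEntryData, not the real model.

from dataclasses import dataclass

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant


@dataclass
class ExampleRuntimeData:
    # Stand-in for the per-entry objects (config, thermostat client, ...).
    client: object


type ExampleConfigEntry = ConfigEntry[ExampleRuntimeData]


async def async_setup_entry(hass: HomeAssistant, entry: ExampleConfigEntry) -> bool:
    # Keep per-entry state on the entry itself instead of hass.data[DOMAIN].
    entry.runtime_data = ExampleRuntimeData(client=object())
    return True


async def async_unload_entry(hass: HomeAssistant, entry: ExampleConfigEntry) -> bool:
    # runtime_data is reachable here without any hass.data lookup.
    _ = entry.runtime_data.client
    return True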
@ -53,12 +58,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
|||
ble_device=device,
|
||||
)
|
||||
|
||||
eq3_config_entry = Eq3ConfigEntryData(eq3_config=eq3_config, thermostat=thermostat)
|
||||
hass.data.setdefault(DOMAIN, {})[entry.entry_id] = eq3_config_entry
|
||||
|
||||
entry.runtime_data = Eq3ConfigEntryData(
|
||||
eq3_config=eq3_config, thermostat=thermostat
|
||||
)
|
||||
entry.async_on_unload(entry.add_update_listener(update_listener))
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
entry.async_create_background_task(
|
||||
hass, _async_run_thermostat(hass, entry), entry.entry_id
|
||||
)
|
||||
|
@ -66,29 +70,27 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
|||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: Eq3ConfigEntry) -> bool:
|
||||
"""Handle config entry unload."""
|
||||
|
||||
if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
|
||||
eq3_config_entry: Eq3ConfigEntryData = hass.data[DOMAIN].pop(entry.entry_id)
|
||||
await eq3_config_entry.thermostat.async_disconnect()
|
||||
await entry.runtime_data.thermostat.async_disconnect()
|
||||
|
||||
return unload_ok
|
||||
|
||||
|
||||
async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
|
||||
async def update_listener(hass: HomeAssistant, entry: Eq3ConfigEntry) -> None:
|
||||
"""Handle config entry update."""
|
||||
|
||||
await hass.config_entries.async_reload(entry.entry_id)
|
||||
|
||||
|
||||
async def _async_run_thermostat(hass: HomeAssistant, entry: ConfigEntry) -> None:
|
||||
async def _async_run_thermostat(hass: HomeAssistant, entry: Eq3ConfigEntry) -> None:
|
||||
"""Run the thermostat."""
|
||||
|
||||
eq3_config_entry: Eq3ConfigEntryData = hass.data[DOMAIN][entry.entry_id]
|
||||
thermostat = eq3_config_entry.thermostat
|
||||
mac_address = eq3_config_entry.eq3_config.mac_address
|
||||
scan_interval = eq3_config_entry.eq3_config.scan_interval
|
||||
thermostat = entry.runtime_data.thermostat
|
||||
mac_address = entry.runtime_data.eq3_config.mac_address
|
||||
scan_interval = entry.runtime_data.eq3_config.scan_interval
|
||||
|
||||
await _async_reconnect_thermostat(hass, entry)
|
||||
|
||||
|
@ -117,13 +119,14 @@ async def _async_run_thermostat(hass: HomeAssistant, entry: ConfigEntry) -> None
|
|||
await asyncio.sleep(scan_interval)
|
||||
|
||||
|
||||
async def _async_reconnect_thermostat(hass: HomeAssistant, entry: ConfigEntry) -> None:
|
||||
async def _async_reconnect_thermostat(
|
||||
hass: HomeAssistant, entry: Eq3ConfigEntry
|
||||
) -> None:
|
||||
"""Reconnect the thermostat."""
|
||||
|
||||
eq3_config_entry: Eq3ConfigEntryData = hass.data[DOMAIN][entry.entry_id]
|
||||
thermostat = eq3_config_entry.thermostat
|
||||
mac_address = eq3_config_entry.eq3_config.mac_address
|
||||
scan_interval = eq3_config_entry.eq3_config.scan_interval
|
||||
thermostat = entry.runtime_data.thermostat
|
||||
mac_address = entry.runtime_data.eq3_config.mac_address
|
||||
scan_interval = entry.runtime_data.eq3_config.scan_interval
|
||||
|
||||
while True:
|
||||
try:
|
||||
|
|
86 homeassistant/components/eq3btsmart/binary_sensor.py Normal file
@@ -0,0 +1,86 @@
"""Platform for eq3 binary sensor entities."""
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from eq3btsmart.models import Status
|
||||
|
||||
from homeassistant.components.binary_sensor import (
|
||||
BinarySensorDeviceClass,
|
||||
BinarySensorEntity,
|
||||
BinarySensorEntityDescription,
|
||||
)
|
||||
from homeassistant.const import EntityCategory
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
|
||||
from . import Eq3ConfigEntry
|
||||
from .const import ENTITY_KEY_BATTERY, ENTITY_KEY_DST, ENTITY_KEY_WINDOW
|
||||
from .entity import Eq3Entity
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class Eq3BinarySensorEntityDescription(BinarySensorEntityDescription):
|
||||
"""Entity description for eq3 binary sensors."""
|
||||
|
||||
value_func: Callable[[Status], bool]
|
||||
|
||||
|
||||
BINARY_SENSOR_ENTITY_DESCRIPTIONS = [
|
||||
Eq3BinarySensorEntityDescription(
|
||||
value_func=lambda status: status.is_low_battery,
|
||||
key=ENTITY_KEY_BATTERY,
|
||||
device_class=BinarySensorDeviceClass.BATTERY,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
Eq3BinarySensorEntityDescription(
|
||||
value_func=lambda status: status.is_window_open,
|
||||
key=ENTITY_KEY_WINDOW,
|
||||
device_class=BinarySensorDeviceClass.WINDOW,
|
||||
),
|
||||
Eq3BinarySensorEntityDescription(
|
||||
value_func=lambda status: status.is_dst,
|
||||
key=ENTITY_KEY_DST,
|
||||
translation_key=ENTITY_KEY_DST,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: Eq3ConfigEntry,
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the entry."""
|
||||
|
||||
async_add_entities(
|
||||
Eq3BinarySensorEntity(entry, entity_description)
|
||||
for entity_description in BINARY_SENSOR_ENTITY_DESCRIPTIONS
|
||||
)
|
||||
|
||||
|
||||
class Eq3BinarySensorEntity(Eq3Entity, BinarySensorEntity):
|
||||
"""Base class for eQ-3 binary sensor entities."""
|
||||
|
||||
entity_description: Eq3BinarySensorEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
entry: Eq3ConfigEntry,
|
||||
entity_description: Eq3BinarySensorEntityDescription,
|
||||
) -> None:
|
||||
"""Initialize the entity."""
|
||||
|
||||
super().__init__(entry, entity_description.key)
|
||||
self.entity_description = entity_description
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool:
|
||||
"""Return the state of the binary sensor."""
|
||||
|
||||
if TYPE_CHECKING:
|
||||
assert self._thermostat.status is not None
|
||||
|
||||
return self.entity_description.value_func(self._thermostat.status)
|
|
@ -3,7 +3,6 @@
|
|||
import logging
|
||||
from typing import Any
|
||||
|
||||
from eq3btsmart import Thermostat
|
||||
from eq3btsmart.const import EQ3BT_MAX_TEMP, EQ3BT_OFF_TEMP, Eq3Preset, OperationMode
|
||||
from eq3btsmart.exceptions import Eq3Exception
|
||||
|
||||
|
@ -15,45 +14,35 @@ from homeassistant.components.climate import (
|
|||
HVACAction,
|
||||
HVACMode,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import ATTR_TEMPERATURE, PRECISION_HALVES, UnitOfTemperature
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import ServiceValidationError
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers.device_registry import CONNECTION_BLUETOOTH, DeviceInfo
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.device_registry import CONNECTION_BLUETOOTH
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.util import slugify
|
||||
|
||||
from . import Eq3ConfigEntry
|
||||
from .const import (
|
||||
DEVICE_MODEL,
|
||||
DOMAIN,
|
||||
EQ_TO_HA_HVAC,
|
||||
HA_TO_EQ_HVAC,
|
||||
MANUFACTURER,
|
||||
SIGNAL_THERMOSTAT_CONNECTED,
|
||||
SIGNAL_THERMOSTAT_DISCONNECTED,
|
||||
CurrentTemperatureSelector,
|
||||
Preset,
|
||||
TargetTemperatureSelector,
|
||||
)
|
||||
from .entity import Eq3Entity
|
||||
from .models import Eq3Config, Eq3ConfigEntryData
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
entry: Eq3ConfigEntry,
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
) -> None:
|
||||
"""Handle config entry setup."""
|
||||
|
||||
eq3_config_entry: Eq3ConfigEntryData = hass.data[DOMAIN][config_entry.entry_id]
|
||||
|
||||
async_add_entities(
|
||||
[Eq3Climate(eq3_config_entry.eq3_config, eq3_config_entry.thermostat)],
|
||||
[Eq3Climate(entry)],
|
||||
)
|
||||
|
||||
|
||||
|
@ -80,53 +69,6 @@ class Eq3Climate(Eq3Entity, ClimateEntity):
|
|||
_attr_preset_mode: str | None = None
|
||||
_target_temperature: float | None = None
|
||||
|
||||
def __init__(self, eq3_config: Eq3Config, thermostat: Thermostat) -> None:
|
||||
"""Initialize the climate entity."""
|
||||
|
||||
super().__init__(eq3_config, thermostat)
|
||||
self._attr_unique_id = dr.format_mac(eq3_config.mac_address)
|
||||
self._attr_device_info = DeviceInfo(
|
||||
name=slugify(self._eq3_config.mac_address),
|
||||
manufacturer=MANUFACTURER,
|
||||
model=DEVICE_MODEL,
|
||||
connections={(CONNECTION_BLUETOOTH, self._eq3_config.mac_address)},
|
||||
)
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Run when entity about to be added to hass."""
|
||||
|
||||
self._thermostat.register_update_callback(self._async_on_updated)
|
||||
|
||||
self.async_on_remove(
|
||||
async_dispatcher_connect(
|
||||
self.hass,
|
||||
f"{SIGNAL_THERMOSTAT_DISCONNECTED}_{self._eq3_config.mac_address}",
|
||||
self._async_on_disconnected,
|
||||
)
|
||||
)
|
||||
self.async_on_remove(
|
||||
async_dispatcher_connect(
|
||||
self.hass,
|
||||
f"{SIGNAL_THERMOSTAT_CONNECTED}_{self._eq3_config.mac_address}",
|
||||
self._async_on_connected,
|
||||
)
|
||||
)
|
||||
|
||||
async def async_will_remove_from_hass(self) -> None:
|
||||
"""Run when entity will be removed from hass."""
|
||||
|
||||
self._thermostat.unregister_update_callback(self._async_on_updated)
|
||||
|
||||
@callback
|
||||
def _async_on_disconnected(self) -> None:
|
||||
self._attr_available = False
|
||||
self.async_write_ha_state()
|
||||
|
||||
@callback
|
||||
def _async_on_connected(self) -> None:
|
||||
self._attr_available = True
|
||||
self.async_write_ha_state()
|
||||
|
||||
@callback
|
||||
def _async_on_updated(self) -> None:
|
||||
"""Handle updated data from the thermostat."""
|
||||
|
@ -137,12 +79,15 @@ class Eq3Climate(Eq3Entity, ClimateEntity):
|
|||
if self._thermostat.device_data is not None:
|
||||
self._async_on_device_updated()
|
||||
|
||||
self.async_write_ha_state()
|
||||
super()._async_on_updated()
|
||||
|
||||
@callback
|
||||
def _async_on_status_updated(self) -> None:
|
||||
"""Handle updated status from the thermostat."""
|
||||
|
||||
if self._thermostat.status is None:
|
||||
return
|
||||
|
||||
self._target_temperature = self._thermostat.status.target_temperature.value
|
||||
self._attr_hvac_mode = EQ_TO_HA_HVAC[self._thermostat.status.operation_mode]
|
||||
self._attr_current_temperature = self._get_current_temperature()
|
||||
|
@ -154,13 +99,16 @@ class Eq3Climate(Eq3Entity, ClimateEntity):
|
|||
def _async_on_device_updated(self) -> None:
|
||||
"""Handle updated device data from the thermostat."""
|
||||
|
||||
if self._thermostat.device_data is None:
|
||||
return
|
||||
|
||||
device_registry = dr.async_get(self.hass)
|
||||
if device := device_registry.async_get_device(
|
||||
connections={(CONNECTION_BLUETOOTH, self._eq3_config.mac_address)},
|
||||
):
|
||||
device_registry.async_update_device(
|
||||
device.id,
|
||||
sw_version=self._thermostat.device_data.firmware_version,
|
||||
sw_version=str(self._thermostat.device_data.firmware_version),
|
||||
serial_number=self._thermostat.device_data.device_serial.value,
|
||||
)
|
||||
|
||||
|
@ -265,7 +213,7 @@ class Eq3Climate(Eq3Entity, ClimateEntity):
|
|||
self.async_write_ha_state()
|
||||
|
||||
try:
|
||||
await self._thermostat.async_set_temperature(self._target_temperature)
|
||||
await self._thermostat.async_set_temperature(temperature)
|
||||
except Eq3Exception:
|
||||
_LOGGER.error(
|
||||
"[%s] Failed setting temperature", self._eq3_config.mac_address
|
||||
|
|
|
@@ -18,8 +18,14 @@ DOMAIN = "eq3btsmart"
MANUFACTURER = "eQ-3 AG"
DEVICE_MODEL = "CC-RT-BLE-EQ"

GET_DEVICE_TIMEOUT = 5 # seconds
ENTITY_KEY_DST = "dst"
ENTITY_KEY_BATTERY = "battery"
ENTITY_KEY_WINDOW = "window"
ENTITY_KEY_LOCK = "lock"
ENTITY_KEY_BOOST = "boost"
ENTITY_KEY_AWAY = "away"

GET_DEVICE_TIMEOUT = 5 # seconds

EQ_TO_HA_HVAC: dict[OperationMode, HVACMode] = {
OperationMode.OFF: HVACMode.OFF,
@ -1,10 +1,22 @@
|
|||
"""Base class for all eQ-3 entities."""
|
||||
|
||||
from eq3btsmart.thermostat import Thermostat
|
||||
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers.device_registry import (
|
||||
CONNECTION_BLUETOOTH,
|
||||
DeviceInfo,
|
||||
format_mac,
|
||||
)
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.entity import Entity
|
||||
from homeassistant.util import slugify
|
||||
|
||||
from .models import Eq3Config
|
||||
from . import Eq3ConfigEntry
|
||||
from .const import (
|
||||
DEVICE_MODEL,
|
||||
MANUFACTURER,
|
||||
SIGNAL_THERMOSTAT_CONNECTED,
|
||||
SIGNAL_THERMOSTAT_DISCONNECTED,
|
||||
)
|
||||
|
||||
|
||||
class Eq3Entity(Entity):
|
||||
|
@ -12,8 +24,70 @@ class Eq3Entity(Entity):
|
|||
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(self, eq3_config: Eq3Config, thermostat: Thermostat) -> None:
|
||||
def __init__(
|
||||
self,
|
||||
entry: Eq3ConfigEntry,
|
||||
unique_id_key: str | None = None,
|
||||
) -> None:
|
||||
"""Initialize the eq3 entity."""
|
||||
|
||||
self._eq3_config = eq3_config
|
||||
self._thermostat = thermostat
|
||||
self._eq3_config = entry.runtime_data.eq3_config
|
||||
self._thermostat = entry.runtime_data.thermostat
|
||||
self._attr_device_info = DeviceInfo(
|
||||
name=slugify(self._eq3_config.mac_address),
|
||||
manufacturer=MANUFACTURER,
|
||||
model=DEVICE_MODEL,
|
||||
connections={(CONNECTION_BLUETOOTH, self._eq3_config.mac_address)},
|
||||
)
|
||||
suffix = f"_{unique_id_key}" if unique_id_key else ""
|
||||
self._attr_unique_id = f"{format_mac(self._eq3_config.mac_address)}{suffix}"
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Run when entity about to be added to hass."""
|
||||
|
||||
self._thermostat.register_update_callback(self._async_on_updated)
|
||||
|
||||
self.async_on_remove(
|
||||
async_dispatcher_connect(
|
||||
self.hass,
|
||||
f"{SIGNAL_THERMOSTAT_DISCONNECTED}_{self._eq3_config.mac_address}",
|
||||
self._async_on_disconnected,
|
||||
)
|
||||
)
|
||||
self.async_on_remove(
|
||||
async_dispatcher_connect(
|
||||
self.hass,
|
||||
f"{SIGNAL_THERMOSTAT_CONNECTED}_{self._eq3_config.mac_address}",
|
||||
self._async_on_connected,
|
||||
)
|
||||
)
|
||||
|
||||
async def async_will_remove_from_hass(self) -> None:
|
||||
"""Run when entity will be removed from hass."""
|
||||
|
||||
self._thermostat.unregister_update_callback(self._async_on_updated)
|
||||
|
||||
def _async_on_updated(self) -> None:
|
||||
"""Handle updated data from the thermostat."""
|
||||
|
||||
self.async_write_ha_state()
|
||||
|
||||
@callback
|
||||
def _async_on_disconnected(self) -> None:
|
||||
"""Handle disconnection from the thermostat."""
|
||||
|
||||
self._attr_available = False
|
||||
self.async_write_ha_state()
|
||||
|
||||
@callback
|
||||
def _async_on_connected(self) -> None:
|
||||
"""Handle connection to the thermostat."""
|
||||
|
||||
self._attr_available = True
|
||||
self.async_write_ha_state()
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Whether the entity is available."""
|
||||
|
||||
return self._thermostat.status is not None and self._attr_available
|
||||
|
|
32 homeassistant/components/eq3btsmart/icons.json Normal file
@@ -0,0 +1,32 @@
{
|
||||
"entity": {
|
||||
"binary_sensor": {
|
||||
"dst": {
|
||||
"default": "mdi:sun-clock",
|
||||
"state": {
|
||||
"off": "mdi:sun-clock-outline"
|
||||
}
|
||||
}
|
||||
},
|
||||
"switch": {
|
||||
"away": {
|
||||
"default": "mdi:home-account",
|
||||
"state": {
|
||||
"on": "mdi:home-export"
|
||||
}
|
||||
},
|
||||
"lock": {
|
||||
"default": "mdi:lock",
|
||||
"state": {
|
||||
"off": "mdi:lock-off"
|
||||
}
|
||||
},
|
||||
"boost": {
|
||||
"default": "mdi:fire",
|
||||
"state": {
|
||||
"off": "mdi:fire-off"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@@ -23,5 +23,5 @@
"iot_class": "local_polling",
"loggers": ["eq3btsmart"],
"quality_scale": "silver",
"requirements": ["eq3btsmart==1.2.0", "bleak-esphome==1.1.0"]
"requirements": ["eq3btsmart==1.2.1", "bleak-esphome==1.1.0"]
}
@ -18,5 +18,23 @@
|
|||
"error": {
|
||||
"invalid_mac_address": "Invalid MAC address"
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"binary_sensor": {
|
||||
"dst": {
|
||||
"name": "Daylight saving time"
|
||||
}
|
||||
},
|
||||
"switch": {
|
||||
"lock": {
|
||||
"name": "Lock"
|
||||
},
|
||||
"boost": {
|
||||
"name": "Boost"
|
||||
},
|
||||
"away": {
|
||||
"name": "Away"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
94 homeassistant/components/eq3btsmart/switch.py Normal file
@@ -0,0 +1,94 @@
"""Platform for eq3 switch entities."""
|
||||
|
||||
from collections.abc import Awaitable, Callable
|
||||
from dataclasses import dataclass
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from eq3btsmart import Thermostat
|
||||
from eq3btsmart.models import Status
|
||||
|
||||
from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
|
||||
from . import Eq3ConfigEntry
|
||||
from .const import ENTITY_KEY_AWAY, ENTITY_KEY_BOOST, ENTITY_KEY_LOCK
|
||||
from .entity import Eq3Entity
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class Eq3SwitchEntityDescription(SwitchEntityDescription):
|
||||
"""Entity description for eq3 switch entities."""
|
||||
|
||||
toggle_func: Callable[[Thermostat], Callable[[bool], Awaitable[None]]]
|
||||
value_func: Callable[[Status], bool]
|
||||
|
||||
|
||||
SWITCH_ENTITY_DESCRIPTIONS = [
|
||||
Eq3SwitchEntityDescription(
|
||||
key=ENTITY_KEY_LOCK,
|
||||
translation_key=ENTITY_KEY_LOCK,
|
||||
toggle_func=lambda thermostat: thermostat.async_set_locked,
|
||||
value_func=lambda status: status.is_locked,
|
||||
),
|
||||
Eq3SwitchEntityDescription(
|
||||
key=ENTITY_KEY_BOOST,
|
||||
translation_key=ENTITY_KEY_BOOST,
|
||||
toggle_func=lambda thermostat: thermostat.async_set_boost,
|
||||
value_func=lambda status: status.is_boost,
|
||||
),
|
||||
Eq3SwitchEntityDescription(
|
||||
key=ENTITY_KEY_AWAY,
|
||||
translation_key=ENTITY_KEY_AWAY,
|
||||
toggle_func=lambda thermostat: thermostat.async_set_away,
|
||||
value_func=lambda status: status.is_away,
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: Eq3ConfigEntry,
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the entry."""
|
||||
|
||||
async_add_entities(
|
||||
Eq3SwitchEntity(entry, entity_description)
|
||||
for entity_description in SWITCH_ENTITY_DESCRIPTIONS
|
||||
)
|
||||
|
||||
|
||||
class Eq3SwitchEntity(Eq3Entity, SwitchEntity):
|
||||
"""Base class for eq3 switch entities."""
|
||||
|
||||
entity_description: Eq3SwitchEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
entry: Eq3ConfigEntry,
|
||||
entity_description: Eq3SwitchEntityDescription,
|
||||
) -> None:
|
||||
"""Initialize the entity."""
|
||||
|
||||
super().__init__(entry, entity_description.key)
|
||||
self.entity_description = entity_description
|
||||
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
"""Turn on the switch."""
|
||||
|
||||
await self.entity_description.toggle_func(self._thermostat)(True)
|
||||
|
||||
async def async_turn_off(self, **kwargs: Any) -> None:
|
||||
"""Turn off the switch."""
|
||||
|
||||
await self.entity_description.toggle_func(self._thermostat)(False)
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool:
|
||||
"""Return the state of the switch."""
|
||||
|
||||
if TYPE_CHECKING:
|
||||
assert self._thermostat.status is not None
|
||||
|
||||
return self.entity_description.value_func(self._thermostat.status)
|
|
@@ -257,6 +257,9 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN):
self, discovery_info: MqttServiceInfo
) -> ConfigFlowResult:
"""Handle MQTT discovery."""
if not discovery_info.payload:
return self.async_abort(reason="mqtt_missing_payload")

device_info = json_loads_object(discovery_info.payload)
if "mac" not in device_info:
return self.async_abort(reason="mqtt_missing_mac")
@@ -8,7 +8,8 @@
"service_received": "Action received",
"mqtt_missing_mac": "Missing MAC address in MQTT properties.",
"mqtt_missing_api": "Missing API port in MQTT properties.",
"mqtt_missing_ip": "Missing IP address in MQTT properties."
"mqtt_missing_ip": "Missing IP address in MQTT properties.",
"mqtt_missing_payload": "Missing MQTT Payload."
},
"error": {
"resolve_error": "Can't resolve address of the ESP. If this error persists, please set a static IP address",
@@ -73,11 +73,9 @@ class EzvizUpdateEntity(EzvizEntity, UpdateEntity):
return self.data["version"]

@property
def in_progress(self) -> bool | int | None:
def in_progress(self) -> bool:
"""Update installation progress."""
if self.data["upgrade_in_progress"]:
return self.data["upgrade_percent"]
return False
return bool(self.data["upgrade_in_progress"])

@property
def latest_version(self) -> str | None:

@@ -93,6 +91,13 @@ class EzvizUpdateEntity(EzvizEntity, UpdateEntity):
return self.data["latest_firmware_info"].get("desc")
return None

@property
def update_percentage(self) -> int | None:
"""Update installation progress."""
if self.data["upgrade_in_progress"]:
return self.data["upgrade_percent"]
return None

async def async_install(
self, version: str | None, backup: bool, **kwargs: Any
) -> None:
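A minimal sketch of the split shown above for a generic update entity: `in_progress` stays a plain boolean and the numeric progress moves to `update_percentage`; the `data` payload is a placeholder, not the EZVIZ coordinator data.

from homeassistant.components.update import UpdateEntity


class ExampleUpdateEntity(UpdateEntity):
    """Illustrative entity mirroring the change above."""

    data = {"upgrade_in_progress": True, "upgrade_percent": 40}  # placeholder payload

    @property
    def in_progress(self) -> bool:
        # Boolean only; the percentage no longer rides on this property.
        return bool(self.data["upgrade_in_progress"])

    @property
    def update_percentage(self) -> int | None:
        # Reported separately while an upgrade is running.
        if self.data["upgrade_in_progress"]:
            return self.data["upgrade_percent"]
        return None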
@@ -4,5 +4,5 @@
"codeowners": [],
"documentation": "https://www.home-assistant.io/integrations/ffmpeg",
"integration_type": "system",
"requirements": ["ha-ffmpeg==3.2.1"]
"requirements": ["ha-ffmpeg==3.2.2"]
}
@ -3,88 +3,16 @@
|
|||
from copy import deepcopy
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components.notify import migrate_notify_issue
|
||||
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
|
||||
from homeassistant.const import (
|
||||
CONF_FILE_PATH,
|
||||
CONF_NAME,
|
||||
CONF_PLATFORM,
|
||||
CONF_SCAN_INTERVAL,
|
||||
Platform,
|
||||
)
|
||||
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_FILE_PATH, CONF_NAME, CONF_PLATFORM, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.helpers import (
|
||||
config_validation as cv,
|
||||
discovery,
|
||||
issue_registry as ir,
|
||||
)
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import DOMAIN
|
||||
from .notify import PLATFORM_SCHEMA as NOTIFY_PLATFORM_SCHEMA
|
||||
from .sensor import PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA
|
||||
|
||||
IMPORT_SCHEMA = {
|
||||
Platform.SENSOR: SENSOR_PLATFORM_SCHEMA,
|
||||
Platform.NOTIFY: NOTIFY_PLATFORM_SCHEMA,
|
||||
}
|
||||
|
||||
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
|
||||
PLATFORMS = [Platform.NOTIFY, Platform.SENSOR]
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the file integration."""
|
||||
|
||||
hass.data[DOMAIN] = config
|
||||
if hass.config_entries.async_entries(DOMAIN):
|
||||
# We skip import in case we already have config entries
|
||||
return True
|
||||
# The use of the legacy notify service was deprecated with HA Core 2024.6.0
|
||||
# and will be removed with HA Core 2024.12
|
||||
migrate_notify_issue(hass, DOMAIN, "File", "2024.12.0")
|
||||
# The YAML config was imported with HA Core 2024.6.0 and will be removed with
|
||||
# HA Core 2024.12
|
||||
ir.async_create_issue(
|
||||
hass,
|
||||
HOMEASSISTANT_DOMAIN,
|
||||
f"deprecated_yaml_{DOMAIN}",
|
||||
breaks_in_ha_version="2024.12.0",
|
||||
is_fixable=False,
|
||||
issue_domain=DOMAIN,
|
||||
learn_more_url="https://www.home-assistant.io/integrations/file/",
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
translation_key="deprecated_yaml",
|
||||
translation_placeholders={
|
||||
"domain": DOMAIN,
|
||||
"integration_title": "File",
|
||||
},
|
||||
)
|
||||
|
||||
# Import the YAML config into separate config entries
|
||||
platforms_config: dict[Platform, list[ConfigType]] = {
|
||||
domain: config[domain] for domain in PLATFORMS if domain in config
|
||||
}
|
||||
for domain, items in platforms_config.items():
|
||||
for item in items:
|
||||
if item[CONF_PLATFORM] == DOMAIN:
|
||||
file_config_item = IMPORT_SCHEMA[domain](item)
|
||||
file_config_item[CONF_PLATFORM] = domain
|
||||
if CONF_SCAN_INTERVAL in file_config_item:
|
||||
del file_config_item[CONF_SCAN_INTERVAL]
|
||||
hass.async_create_task(
|
||||
hass.config_entries.flow.async_init(
|
||||
DOMAIN,
|
||||
context={"source": SOURCE_IMPORT},
|
||||
data=file_config_item,
|
||||
)
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up a file component entry."""
|
||||
config = {**entry.data, **entry.options}
|
||||
|
@ -102,20 +30,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
|||
entry, [Platform(entry.data[CONF_PLATFORM])]
|
||||
)
|
||||
entry.async_on_unload(entry.add_update_listener(update_listener))
|
||||
if entry.data[CONF_PLATFORM] == Platform.NOTIFY and CONF_NAME in entry.data:
|
||||
# New notify entities are being setup through the config entry,
|
||||
# but during the deprecation period we want to keep the legacy notify platform,
|
||||
# so we forward the setup config through discovery.
|
||||
# Only the entities from yaml will still be available as legacy service.
|
||||
hass.async_create_task(
|
||||
discovery.async_load_platform(
|
||||
hass,
|
||||
Platform.NOTIFY,
|
||||
DOMAIN,
|
||||
config,
|
||||
hass.data[DOMAIN],
|
||||
)
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
|
|
|
@ -3,7 +3,6 @@
|
|||
from __future__ import annotations
|
||||
|
||||
from copy import deepcopy
|
||||
import os
|
||||
from typing import Any
|
||||
|
||||
import voluptuous as vol
|
||||
|
@ -16,7 +15,6 @@ from homeassistant.config_entries import (
|
|||
)
|
||||
from homeassistant.const import (
|
||||
CONF_FILE_PATH,
|
||||
CONF_FILENAME,
|
||||
CONF_NAME,
|
||||
CONF_PLATFORM,
|
||||
CONF_UNIT_OF_MEASUREMENT,
|
||||
|
@ -132,27 +130,6 @@ class FileConfigFlowHandler(ConfigFlow, domain=DOMAIN):
|
|||
"""Handle file sensor config flow."""
|
||||
return await self._async_handle_step(Platform.SENSOR.value, user_input)
|
||||
|
||||
async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult:
|
||||
"""Import `file`` config from configuration.yaml."""
|
||||
self._async_abort_entries_match(import_data)
|
||||
platform = import_data[CONF_PLATFORM]
|
||||
name: str = import_data.get(CONF_NAME, DEFAULT_NAME)
|
||||
file_name: str
|
||||
if platform == Platform.NOTIFY:
|
||||
file_name = import_data.pop(CONF_FILENAME)
|
||||
file_path: str = os.path.join(self.hass.config.config_dir, file_name)
|
||||
import_data[CONF_FILE_PATH] = file_path
|
||||
else:
|
||||
file_path = import_data[CONF_FILE_PATH]
|
||||
title = f"{name} [{file_path}]"
|
||||
data = deepcopy(import_data)
|
||||
options = {}
|
||||
for key, value in import_data.items():
|
||||
if key not in (CONF_FILE_PATH, CONF_PLATFORM, CONF_NAME):
|
||||
data.pop(key)
|
||||
options[key] = value
|
||||
return self.async_create_entry(title=title, data=data, options=options)
|
||||
|
||||
|
||||
class FileOptionsFlowHandler(OptionsFlow):
|
||||
"""Handle File options."""
|
||||
|
|
|
@ -2,104 +2,23 @@
|
|||
|
||||
from __future__ import annotations
|
||||
|
||||
from functools import partial
|
||||
import logging
|
||||
import os
|
||||
from typing import Any, TextIO
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.notify import (
|
||||
ATTR_TITLE,
|
||||
ATTR_TITLE_DEFAULT,
|
||||
PLATFORM_SCHEMA as NOTIFY_PLATFORM_SCHEMA,
|
||||
BaseNotificationService,
|
||||
NotifyEntity,
|
||||
NotifyEntityFeature,
|
||||
migrate_notify_issue,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_FILE_PATH, CONF_FILENAME, CONF_NAME
|
||||
from homeassistant.const import CONF_FILE_PATH, CONF_NAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ServiceValidationError
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
import homeassistant.util.dt as dt_util
|
||||
|
||||
from .const import CONF_TIMESTAMP, DEFAULT_NAME, DOMAIN, FILE_ICON
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
# The legacy platform schema uses a filename, after import
|
||||
# The full file path is stored in the config entry
|
||||
PLATFORM_SCHEMA = NOTIFY_PLATFORM_SCHEMA.extend(
|
||||
{
|
||||
vol.Required(CONF_FILENAME): cv.string,
|
||||
vol.Optional(CONF_TIMESTAMP, default=False): cv.boolean,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
async def async_get_service(
|
||||
hass: HomeAssistant,
|
||||
config: ConfigType,
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> FileNotificationService | None:
|
||||
"""Get the file notification service."""
|
||||
if discovery_info is None:
|
||||
# We only set up through discovery
|
||||
return None
|
||||
file_path: str = discovery_info[CONF_FILE_PATH]
|
||||
timestamp: bool = discovery_info[CONF_TIMESTAMP]
|
||||
|
||||
return FileNotificationService(file_path, timestamp)
|
||||
|
||||
|
||||
class FileNotificationService(BaseNotificationService):
|
||||
"""Implement the notification service for the File service."""
|
||||
|
||||
def __init__(self, file_path: str, add_timestamp: bool) -> None:
|
||||
"""Initialize the service."""
|
||||
self._file_path = file_path
|
||||
self.add_timestamp = add_timestamp
|
||||
|
||||
async def async_send_message(self, message: str = "", **kwargs: Any) -> None:
|
||||
"""Send a message to a file."""
|
||||
# The use of the legacy notify service was deprecated with HA Core 2024.6.0
|
||||
# and will be removed with HA Core 2024.12
|
||||
migrate_notify_issue(
|
||||
self.hass, DOMAIN, "File", "2024.12.0", service_name=self._service_name
|
||||
)
|
||||
await self.hass.async_add_executor_job(
|
||||
partial(self.send_message, message, **kwargs)
|
||||
)
|
||||
|
||||
def send_message(self, message: str = "", **kwargs: Any) -> None:
|
||||
"""Send a message to a file."""
|
||||
file: TextIO
|
||||
filepath = self._file_path
|
||||
try:
|
||||
with open(filepath, "a", encoding="utf8") as file:
|
||||
if os.stat(filepath).st_size == 0:
|
||||
title = (
|
||||
f"{kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT)} notifications (Log"
|
||||
f" started: {dt_util.utcnow().isoformat()})\n{'-' * 80}\n"
|
||||
)
|
||||
file.write(title)
|
||||
|
||||
if self.add_timestamp:
|
||||
text = f"{dt_util.utcnow().isoformat()} {message}\n"
|
||||
else:
|
||||
text = f"{message}\n"
|
||||
file.write(text)
|
||||
except OSError as exc:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="write_access_failed",
|
||||
translation_placeholders={"filename": filepath, "exc": f"{exc!r}"},
|
||||
) from exc
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
@@ -6,12 +6,8 @@ import logging
|
|||
import os
|
||||
|
||||
from file_read_backwards import FileReadBackwards
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA,
|
||||
SensorEntity,
|
||||
)
|
||||
from homeassistant.components.sensor import SensorEntity
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import (
|
||||
CONF_FILE_PATH,
|
||||
|
@@ -20,38 +16,13 @@ from homeassistant.const import (
|
|||
CONF_VALUE_TEMPLATE,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.template import Template
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from .const import DEFAULT_NAME, FILE_ICON
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend(
|
||||
{
|
||||
vol.Required(CONF_FILE_PATH): cv.isfile,
|
||||
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
|
||||
vol.Optional(CONF_VALUE_TEMPLATE): cv.string,
|
||||
vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_platform(
|
||||
hass: HomeAssistant,
|
||||
config: ConfigType,
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> None:
|
||||
"""Set up the file sensor from YAML.
|
||||
|
||||
The YAML platform config is automatically
|
||||
imported to a config entry, this method can be removed
|
||||
when YAML support is removed.
|
||||
"""
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
@@ -18,7 +18,7 @@
},
"data_description": {
"file_path": "The local file path to retrieve the sensor value from",
"value_template": "A template to render the the sensors value based on the file content",
"value_template": "A template to render the sensors value based on the file content",
"unit_of_measurement": "Unit of measurement for the sensor"
}
},

@@ -57,6 +57,8 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN):
|
|||
|
||||
VERSION = 1
|
||||
|
||||
_host: str
|
||||
|
||||
@staticmethod
|
||||
@callback
|
||||
def async_get_options_flow(
|
||||
|
@@ -67,7 +69,6 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN):
|
|||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize FRITZ!Box Tools flow."""
|
||||
self._host: str | None = None
|
||||
self._name: str = ""
|
||||
self._password: str = ""
|
||||
self._use_tls: bool = False
|
||||
|
@@ -112,7 +113,6 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN):
|
|||
|
||||
async def async_check_configured_entry(self) -> ConfigEntry | None:
|
||||
"""Check if entry is configured."""
|
||||
assert self._host
|
||||
current_host = await self.hass.async_add_executor_job(
|
||||
socket.gethostbyname, self._host
|
||||
)
|
||||
|
@@ -154,15 +154,17 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN):
|
|||
) -> ConfigFlowResult:
|
||||
"""Handle a flow initialized by discovery."""
|
||||
ssdp_location: ParseResult = urlparse(discovery_info.ssdp_location or "")
|
||||
self._host = ssdp_location.hostname
|
||||
host = ssdp_location.hostname
|
||||
if not host or ipaddress.ip_address(host).is_link_local:
|
||||
return self.async_abort(reason="ignore_ip6_link_local")
|
||||
|
||||
self._host = host
|
||||
self._name = (
|
||||
discovery_info.upnp.get(ssdp.ATTR_UPNP_FRIENDLY_NAME)
|
||||
or discovery_info.upnp[ssdp.ATTR_UPNP_MODEL_NAME]
|
||||
)
|
||||
|
||||
if not self._host or ipaddress.ip_address(self._host).is_link_local:
|
||||
return self.async_abort(reason="ignore_ip6_link_local")
|
||||
|
||||
uuid: str | None
|
||||
if uuid := discovery_info.upnp.get(ssdp.ATTR_UPNP_UDN):
|
||||
if uuid.startswith("uuid:"):
|
||||
uuid = uuid[5:]
|
||||
|
|
|
@@ -43,10 +43,11 @@ class FritzboxConfigFlow(ConfigFlow, domain=DOMAIN):
|
|||
|
||||
VERSION = 1
|
||||
|
||||
_name: str
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize flow."""
|
||||
self._host: str | None = None
|
||||
self._name: str | None = None
|
||||
self._password: str | None = None
|
||||
self._username: str | None = None
|
||||
|
||||
|
@@ -158,7 +159,6 @@ class FritzboxConfigFlow(ConfigFlow, domain=DOMAIN):
|
|||
result = await self.async_try_connect()
|
||||
|
||||
if result == RESULT_SUCCESS:
|
||||
assert self._name is not None
|
||||
return self._get_entry(self._name)
|
||||
if result != RESULT_INVALID_AUTH:
|
||||
return self.async_abort(reason=result)
|
||||
|
|
|
@@ -20,5 +20,5 @@
"documentation": "https://www.home-assistant.io/integrations/frontend",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["home-assistant-frontend==20241105.0"]
"requirements": ["home-assistant-frontend==20241106.2"]
}

@@ -7,5 +7,5 @@
"documentation": "https://www.home-assistant.io/integrations/generic",
"integration_type": "device",
"iot_class": "local_push",
"requirements": ["av==13.1.0", "Pillow==10.4.0"]
"requirements": ["av==13.1.0", "Pillow==11.0.0"]
}

@@ -4,7 +4,7 @@
"step": {
"user": {
"title": "Add generic hygrostat",
"description": "Create a entity that control the humidity via a switch and sensor.",
"description": "Create a humidifier entity that control the humidity via a switch and sensor.",
"data": {
"device_class": "Device class",
"dry_tolerance": "Dry tolerance",

@@ -3,7 +3,7 @@
"config": {
"step": {
"user": {
"title": "Add generic thermostat helper",
"title": "Add generic thermostat",
"description": "Create a climate entity that controls the temperature via a switch and sensor.",
"data": {
"ac_mode": "Cooling mode",
@@ -17,8 +17,8 @@
"data_description": {
"ac_mode": "Set the actuator specified to be treated as a cooling device instead of a heating device.",
"heater": "Switch entity used to cool or heat depending on A/C mode.",
"target_sensor": "Temperature sensor that reflect the current temperature.",
"min_cycle_duration": "Set a minimum amount of time that the switch specified must be in its current state prior to being switched either off or on. This option will be ignored if the keep alive option is set.",
"target_sensor": "Temperature sensor that reflects the current temperature.",
"min_cycle_duration": "Set a minimum amount of time that the switch specified must be in its current state prior to being switched either off or on.",
"cold_tolerance": "Minimum amount of difference between the temperature read by the temperature sensor the target temperature that must change prior to being switched on. For example, if the target temperature is 25 and the tolerance is 0.5 the heater will start when the sensor equals or goes below 24.5.",
"hot_tolerance": "Minimum amount of difference between the temperature read by the temperature sensor the target temperature that must change prior to being switched off. For example, if the target temperature is 25 and the tolerance is 0.5 the heater will stop when the sensor equals or goes above 25.5."
}

@@ -9,7 +9,6 @@ import aiohttp
|
|||
from geniushubclient import GeniusHub
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant import config_entries
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import (
|
||||
ATTR_ENTITY_ID,
|
||||
|
@@ -21,20 +20,12 @@ from homeassistant.const import (
|
|||
CONF_USERNAME,
|
||||
Platform,
|
||||
)
|
||||
from homeassistant.core import (
|
||||
DOMAIN as HOMEASSISTANT_DOMAIN,
|
||||
HomeAssistant,
|
||||
ServiceCall,
|
||||
callback,
|
||||
)
|
||||
from homeassistant.data_entry_flow import FlowResultType
|
||||
from homeassistant.core import HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.helpers import config_validation as cv, entity_registry as er
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_send
|
||||
from homeassistant.helpers.event import async_track_time_interval
|
||||
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
|
||||
from homeassistant.helpers.service import verify_domain_control
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
|
@@ -45,27 +36,6 @@ SCAN_INTERVAL = timedelta(seconds=60)
|
|||
|
||||
MAC_ADDRESS_REGEXP = r"^([0-9A-F]{2}:){5}([0-9A-F]{2})$"
|
||||
|
||||
CLOUD_API_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_TOKEN): cv.string,
|
||||
vol.Required(CONF_MAC): vol.Match(MAC_ADDRESS_REGEXP),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
LOCAL_API_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_HOST): cv.string,
|
||||
vol.Required(CONF_USERNAME): cv.string,
|
||||
vol.Required(CONF_PASSWORD): cv.string,
|
||||
vol.Optional(CONF_MAC): vol.Match(MAC_ADDRESS_REGEXP),
|
||||
}
|
||||
)
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema(
|
||||
{DOMAIN: vol.Any(LOCAL_API_SCHEMA, CLOUD_API_SCHEMA)}, extra=vol.ALLOW_EXTRA
|
||||
)
|
||||
|
||||
ATTR_ZONE_MODE = "mode"
|
||||
ATTR_DURATION = "duration"
|
||||
|
||||
|
@@ -100,56 +70,6 @@ PLATFORMS = [
|
|||
]
|
||||
|
||||
|
||||
async def _async_import(hass: HomeAssistant, base_config: ConfigType) -> None:
|
||||
"""Import a config entry from configuration.yaml."""
|
||||
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN,
|
||||
context={"source": config_entries.SOURCE_IMPORT},
|
||||
data=base_config[DOMAIN],
|
||||
)
|
||||
if (
|
||||
result["type"] is FlowResultType.CREATE_ENTRY
|
||||
or result["reason"] == "already_configured"
|
||||
):
|
||||
async_create_issue(
|
||||
hass,
|
||||
HOMEASSISTANT_DOMAIN,
|
||||
f"deprecated_yaml_{DOMAIN}",
|
||||
breaks_in_ha_version="2024.12.0",
|
||||
is_fixable=False,
|
||||
issue_domain=DOMAIN,
|
||||
severity=IssueSeverity.WARNING,
|
||||
translation_key="deprecated_yaml",
|
||||
translation_placeholders={
|
||||
"domain": DOMAIN,
|
||||
"integration_title": "Genius Hub",
|
||||
},
|
||||
)
|
||||
return
|
||||
async_create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
f"deprecated_yaml_import_issue_{result['reason']}",
|
||||
breaks_in_ha_version="2024.12.0",
|
||||
is_fixable=False,
|
||||
issue_domain=DOMAIN,
|
||||
severity=IssueSeverity.WARNING,
|
||||
translation_key=f"deprecated_yaml_import_issue_{result['reason']}",
|
||||
translation_placeholders={
|
||||
"domain": DOMAIN,
|
||||
"integration_title": "Genius Hub",
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, base_config: ConfigType) -> bool:
|
||||
"""Set up a Genius Hub system."""
|
||||
if DOMAIN in base_config:
|
||||
hass.async_create_task(_async_import(hass, base_config))
|
||||
return True
|
||||
|
||||
|
||||
type GeniusHubConfigEntry = ConfigEntry[GeniusBroker]
|
||||
|
||||
|
||||
|
|
|
@@ -13,7 +13,6 @@ import voluptuous as vol
|
|||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_TOKEN, CONF_USERNAME
|
||||
from homeassistant.data_entry_flow import FlowResultType
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .const import DOMAIN
|
||||
|
@@ -123,14 +122,3 @@ class GeniusHubConfigFlow(ConfigFlow, domain=DOMAIN):
|
|||
return self.async_show_form(
|
||||
step_id="cloud_api", errors=errors, data_schema=CLOUD_API_SCHEMA
|
||||
)
|
||||
|
||||
async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult:
|
||||
"""Import the yaml config."""
|
||||
if CONF_HOST in import_data:
|
||||
result = await self.async_step_local_api(import_data)
|
||||
else:
|
||||
result = await self.async_step_cloud_api(import_data)
|
||||
if result["type"] is FlowResultType.FORM:
|
||||
assert result["errors"]
|
||||
return self.async_abort(reason=result["errors"]["base"])
|
||||
return result
|
||||
|
|
|
@@ -4,6 +4,7 @@ import logging
|
|||
import shutil
|
||||
|
||||
from aiohttp.client_exceptions import ClientConnectionError, ServerConnectionError
|
||||
from awesomeversion import AwesomeVersion
|
||||
from go2rtc_client import Go2RtcRestClient
|
||||
from go2rtc_client.exceptions import Go2RtcClientError, Go2RtcVersionError
|
||||
from go2rtc_client.ws import (
|
||||
|
@@ -32,13 +33,23 @@ from homeassistant.config_entries import SOURCE_SYSTEM, ConfigEntry
|
|||
from homeassistant.const import CONF_URL, EVENT_HOMEASSISTANT_STOP
|
||||
from homeassistant.core import Event, HomeAssistant, callback
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.helpers import config_validation as cv, discovery_flow
|
||||
from homeassistant.helpers import (
|
||||
config_validation as cv,
|
||||
discovery_flow,
|
||||
issue_registry as ir,
|
||||
)
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
from homeassistant.util.package import is_docker_env
|
||||
|
||||
from .const import CONF_DEBUG_UI, DEBUG_UI_URL_MESSAGE, DOMAIN, HA_MANAGED_URL
|
||||
from .const import (
|
||||
CONF_DEBUG_UI,
|
||||
DEBUG_UI_URL_MESSAGE,
|
||||
DOMAIN,
|
||||
HA_MANAGED_URL,
|
||||
RECOMMENDED_VERSION,
|
||||
)
|
||||
from .server import Server
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
@@ -147,7 +158,21 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
|||
# Validate the server URL
|
||||
try:
|
||||
client = Go2RtcRestClient(async_get_clientsession(hass), url)
|
||||
await client.validate_server_version()
|
||||
version = await client.validate_server_version()
|
||||
if version < AwesomeVersion(RECOMMENDED_VERSION):
|
||||
ir.async_create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
"recommended_version",
|
||||
is_fixable=False,
|
||||
is_persistent=False,
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
translation_key="recommended_version",
|
||||
translation_placeholders={
|
||||
"recommended_version": RECOMMENDED_VERSION,
|
||||
"current_version": str(version),
|
||||
},
|
||||
)
|
||||
except Go2RtcClientError as err:
|
||||
if isinstance(err.__cause__, _RETRYABLE_ERRORS):
|
||||
raise ConfigEntryNotReady(
|
||||
|
@@ -222,7 +247,16 @@ class WebRTCProvider(CameraWebRTCProvider):
|
|||
if (stream := streams.get(camera.entity_id)) is None or not any(
|
||||
stream_source == producer.url for producer in stream.producers
|
||||
):
|
||||
await self._rest_client.streams.add(camera.entity_id, stream_source)
|
||||
await self._rest_client.streams.add(
|
||||
camera.entity_id,
|
||||
[
|
||||
stream_source,
|
||||
# We are setting any ffmpeg rtsp related logs to debug
|
||||
# Connection problems to the camera will be logged by the first stream
|
||||
# Therefore setting it to debug will not hide any important logs
|
||||
f"ffmpeg:{camera.entity_id}#audio=opus#query=log_level=debug",
|
||||
],
|
||||
)
|
||||
|
||||
@callback
|
||||
def on_messages(message: ReceiveMessages) -> None:
|
||||
|
|
|
@@ -6,3 +6,4 @@ CONF_DEBUG_UI = "debug_ui"
DEBUG_UI_URL_MESSAGE = "Url and debug_ui cannot be set at the same time."
HA_MANAGED_API_PORT = 11984
HA_MANAGED_URL = f"http://localhost:{HA_MANAGED_API_PORT}/"
RECOMMENDED_VERSION = "1.9.7"

@@ -7,6 +7,6 @@
"documentation": "https://www.home-assistant.io/integrations/go2rtc",
"integration_type": "system",
"iot_class": "local_polling",
"requirements": ["go2rtc-client==0.0.1b3"],
"requirements": ["go2rtc-client==0.1.1"],
"single_config_entry": true
}

@@ -24,14 +24,15 @@ _RESPAWN_COOLDOWN = 1

# Default configuration for HA
# - Api is listening only on localhost
# - Disable rtsp listener
# - Enable rtsp for localhost only as ffmpeg needs it
# - Clear default ice servers
_GO2RTC_CONFIG_FORMAT = r"""
_GO2RTC_CONFIG_FORMAT = r"""# This file is managed by Home Assistant
# Do not edit it manually

api:
listen: "{api_ip}:{api_port}"

rtsp:
# ffmpeg needs rtsp for opus audio transcoding
listen: "127.0.0.1:18554"

webrtc:

8
homeassistant/components/go2rtc/strings.json
Normal file
@@ -0,0 +1,8 @@
{
"issues": {
"recommended_version": {
"title": "Outdated go2rtc server detected",
"description": "We detected that you are using an outdated go2rtc server version. For the best experience, we recommend updating the go2rtc server to version `{recommended_version}`.\nCurrently you are using version `{current_version}`."
}
}
}

@@ -87,8 +87,8 @@
}
},
"create_event": {
"name": "Creates event",
"description": "Add a new calendar event.",
"name": "Create event",
"description": "Adds a new calendar event.",
"fields": {
"summary": {
"name": "Summary",

@@ -78,6 +78,7 @@ TYPE_AWNING = f"{PREFIX_TYPES}AWNING"
|
|||
TYPE_BLINDS = f"{PREFIX_TYPES}BLINDS"
|
||||
TYPE_CAMERA = f"{PREFIX_TYPES}CAMERA"
|
||||
TYPE_CURTAIN = f"{PREFIX_TYPES}CURTAIN"
|
||||
TYPE_CARBON_MONOXIDE_DETECTOR = f"{PREFIX_TYPES}CARBON_MONOXIDE_DETECTOR"
|
||||
TYPE_DEHUMIDIFIER = f"{PREFIX_TYPES}DEHUMIDIFIER"
|
||||
TYPE_DOOR = f"{PREFIX_TYPES}DOOR"
|
||||
TYPE_DOORBELL = f"{PREFIX_TYPES}DOORBELL"
|
||||
|
@@ -93,6 +94,7 @@ TYPE_SCENE = f"{PREFIX_TYPES}SCENE"
|
|||
TYPE_SENSOR = f"{PREFIX_TYPES}SENSOR"
|
||||
TYPE_SETTOP = f"{PREFIX_TYPES}SETTOP"
|
||||
TYPE_SHUTTER = f"{PREFIX_TYPES}SHUTTER"
|
||||
TYPE_SMOKE_DETECTOR = f"{PREFIX_TYPES}SMOKE_DETECTOR"
|
||||
TYPE_SPEAKER = f"{PREFIX_TYPES}SPEAKER"
|
||||
TYPE_SWITCH = f"{PREFIX_TYPES}SWITCH"
|
||||
TYPE_THERMOSTAT = f"{PREFIX_TYPES}THERMOSTAT"
|
||||
|
@@ -136,6 +138,7 @@ EVENT_SYNC_RECEIVED = "google_assistant_sync"
|
|||
|
||||
DOMAIN_TO_GOOGLE_TYPES = {
|
||||
alarm_control_panel.DOMAIN: TYPE_ALARM,
|
||||
binary_sensor.DOMAIN: TYPE_SENSOR,
|
||||
button.DOMAIN: TYPE_SCENE,
|
||||
camera.DOMAIN: TYPE_CAMERA,
|
||||
climate.DOMAIN: TYPE_THERMOSTAT,
|
||||
|
@@ -168,6 +171,14 @@ DEVICE_CLASS_TO_GOOGLE_TYPES = {
|
|||
binary_sensor.DOMAIN,
|
||||
binary_sensor.BinarySensorDeviceClass.GARAGE_DOOR,
|
||||
): TYPE_GARAGE,
|
||||
(
|
||||
binary_sensor.DOMAIN,
|
||||
binary_sensor.BinarySensorDeviceClass.SMOKE,
|
||||
): TYPE_SMOKE_DETECTOR,
|
||||
(
|
||||
binary_sensor.DOMAIN,
|
||||
binary_sensor.BinarySensorDeviceClass.CO,
|
||||
): TYPE_CARBON_MONOXIDE_DETECTOR,
|
||||
(cover.DOMAIN, cover.CoverDeviceClass.AWNING): TYPE_AWNING,
|
||||
(cover.DOMAIN, cover.CoverDeviceClass.CURTAIN): TYPE_CURTAIN,
|
||||
(cover.DOMAIN, cover.CoverDeviceClass.DOOR): TYPE_DOOR,
|
||||
|
|
|
@@ -2706,6 +2706,21 @@ class SensorStateTrait(_Trait):
|
|||
),
|
||||
}
|
||||
|
||||
binary_sensor_types = {
|
||||
binary_sensor.BinarySensorDeviceClass.CO: (
|
||||
"CarbonMonoxideLevel",
|
||||
["carbon monoxide detected", "no carbon monoxide detected", "unknown"],
|
||||
),
|
||||
binary_sensor.BinarySensorDeviceClass.SMOKE: (
|
||||
"SmokeLevel",
|
||||
["smoke detected", "no smoke detected", "unknown"],
|
||||
),
|
||||
binary_sensor.BinarySensorDeviceClass.MOISTURE: (
|
||||
"WaterLeak",
|
||||
["leak", "no leak", "unknown"],
|
||||
),
|
||||
}
|
||||
|
||||
name = TRAIT_SENSOR_STATE
|
||||
commands: list[str] = []
|
||||
|
||||
|
@@ -2728,24 +2743,37 @@ class SensorStateTrait(_Trait):
|
|||
@classmethod
|
||||
def supported(cls, domain, features, device_class, _):
|
||||
"""Test if state is supported."""
|
||||
return domain == sensor.DOMAIN and device_class in cls.sensor_types
|
||||
return (domain == sensor.DOMAIN and device_class in cls.sensor_types) or (
|
||||
domain == binary_sensor.DOMAIN and device_class in cls.binary_sensor_types
|
||||
)
|
||||
|
||||
def sync_attributes(self) -> dict[str, Any]:
|
||||
"""Return attributes for a sync request."""
|
||||
device_class = self.state.attributes.get(ATTR_DEVICE_CLASS)
|
||||
data = self.sensor_types.get(device_class)
|
||||
|
||||
if device_class is None or data is None:
|
||||
return {}
|
||||
def create_sensor_state(
|
||||
name: str,
|
||||
raw_value_unit: str | None = None,
|
||||
available_states: list[str] | None = None,
|
||||
) -> dict[str, Any]:
|
||||
sensor_state: dict[str, Any] = {
|
||||
"name": name,
|
||||
}
|
||||
if raw_value_unit:
|
||||
sensor_state["numericCapabilities"] = {"rawValueUnit": raw_value_unit}
|
||||
if available_states:
|
||||
sensor_state["descriptiveCapabilities"] = {
|
||||
"availableStates": available_states
|
||||
}
|
||||
return {"sensorStatesSupported": [sensor_state]}
|
||||
|
||||
sensor_state = {
|
||||
"name": data[0],
|
||||
"numericCapabilities": {"rawValueUnit": data[1]},
|
||||
}
|
||||
|
||||
if device_class == sensor.SensorDeviceClass.AQI:
|
||||
sensor_state["descriptiveCapabilities"] = {
|
||||
"availableStates": [
|
||||
if self.state.domain == sensor.DOMAIN:
|
||||
sensor_data = self.sensor_types.get(device_class)
|
||||
if device_class is None or sensor_data is None:
|
||||
return {}
|
||||
available_states: list[str] | None = None
|
||||
if device_class == sensor.SensorDeviceClass.AQI:
|
||||
available_states = [
|
||||
"healthy",
|
||||
"moderate",
|
||||
"unhealthy for sensitive groups",
|
||||
|
@ -2753,30 +2781,53 @@ class SensorStateTrait(_Trait):
|
|||
"very unhealthy",
|
||||
"hazardous",
|
||||
"unknown",
|
||||
],
|
||||
}
|
||||
|
||||
return {"sensorStatesSupported": [sensor_state]}
|
||||
]
|
||||
return create_sensor_state(sensor_data[0], sensor_data[1], available_states)
|
||||
binary_sensor_data = self.binary_sensor_types.get(device_class)
|
||||
if device_class is None or binary_sensor_data is None:
|
||||
return {}
|
||||
return create_sensor_state(
|
||||
binary_sensor_data[0], available_states=binary_sensor_data[1]
|
||||
)
|
||||
|
||||
def query_attributes(self) -> dict[str, Any]:
|
||||
"""Return the attributes of this trait for this entity."""
|
||||
device_class = self.state.attributes.get(ATTR_DEVICE_CLASS)
|
||||
data = self.sensor_types.get(device_class)
|
||||
|
||||
if device_class is None or data is None:
|
||||
def create_sensor_state(
|
||||
name: str, raw_value: float | None = None, current_state: str | None = None
|
||||
) -> dict[str, Any]:
|
||||
sensor_state: dict[str, Any] = {
|
||||
"name": name,
|
||||
"rawValue": raw_value,
|
||||
}
|
||||
if current_state:
|
||||
sensor_state["currentSensorState"] = current_state
|
||||
return {"currentSensorStateData": [sensor_state]}
|
||||
|
||||
if self.state.domain == sensor.DOMAIN:
|
||||
sensor_data = self.sensor_types.get(device_class)
|
||||
if device_class is None or sensor_data is None:
|
||||
return {}
|
||||
try:
|
||||
value = float(self.state.state)
|
||||
except ValueError:
|
||||
value = None
|
||||
if self.state.state == STATE_UNKNOWN:
|
||||
value = None
|
||||
current_state: str | None = None
|
||||
if device_class == sensor.SensorDeviceClass.AQI:
|
||||
current_state = self._air_quality_description_for_aqi(value)
|
||||
return create_sensor_state(sensor_data[0], value, current_state)
|
||||
|
||||
binary_sensor_data = self.binary_sensor_types.get(device_class)
|
||||
if device_class is None or binary_sensor_data is None:
|
||||
return {}
|
||||
|
||||
try:
|
||||
value = float(self.state.state)
|
||||
except ValueError:
|
||||
value = None
|
||||
if self.state.state == STATE_UNKNOWN:
|
||||
value = None
|
||||
sensor_data = {"name": data[0], "rawValue": value}
|
||||
|
||||
if device_class == sensor.SensorDeviceClass.AQI:
|
||||
sensor_data["currentSensorState"] = self._air_quality_description_for_aqi(
|
||||
value
|
||||
)
|
||||
|
||||
return {"currentSensorStateData": [sensor_data]}
|
||||
value = {
|
||||
STATE_ON: 0,
|
||||
STATE_OFF: 1,
|
||||
STATE_UNKNOWN: 2,
|
||||
}[self.state.state]
|
||||
return create_sensor_state(
|
||||
binary_sensor_data[0], current_state=binary_sensor_data[1][value]
|
||||
)
|
||||
|
|
|
@@ -169,7 +169,7 @@ class GoogleCloudOptionsFlowHandler(OptionsFlow):
|
|||
)
|
||||
),
|
||||
**tts_options_schema(
|
||||
self.options, voices, from_config_flow=True
|
||||
self.config_entry.options, voices, from_config_flow=True
|
||||
).schema,
|
||||
vol.Optional(
|
||||
CONF_STT_MODEL,
|
||||
|
@@ -182,6 +182,6 @@ class GoogleCloudOptionsFlowHandler(OptionsFlow):
|
|||
),
|
||||
}
|
||||
),
|
||||
self.options,
|
||||
self.config_entry.options,
|
||||
),
|
||||
)
|
||||
|
|
|
@@ -52,7 +52,7 @@ async def async_tts_voices(


def tts_options_schema(
config_options: dict[str, Any],
config_options: Mapping[str, Any],
voices: dict[str, list[str]],
from_config_flow: bool = False,
) -> vol.Schema:

@@ -25,7 +25,16 @@ UNIT_TASKS = "tasks"
|
|||
ATTR_CONFIG_ENTRY = "config_entry"
|
||||
ATTR_SKILL = "skill"
|
||||
ATTR_TASK = "task"
|
||||
ATTR_DIRECTION = "direction"
|
||||
SERVICE_CAST_SKILL = "cast_skill"
|
||||
SERVICE_START_QUEST = "start_quest"
|
||||
SERVICE_ACCEPT_QUEST = "accept_quest"
|
||||
SERVICE_CANCEL_QUEST = "cancel_quest"
|
||||
SERVICE_ABORT_QUEST = "abort_quest"
|
||||
SERVICE_REJECT_QUEST = "reject_quest"
|
||||
SERVICE_LEAVE_QUEST = "leave_quest"
|
||||
SERVICE_SCORE_HABIT = "score_habit"
|
||||
SERVICE_SCORE_REWARD = "score_reward"
|
||||
|
||||
WARRIOR = "warrior"
|
||||
ROGUE = "rogue"
|
||||
|
|
|
@@ -51,17 +51,22 @@ class HabiticaDataUpdateCoordinator(DataUpdateCoordinator[HabiticaData]):
|
|||
),
|
||||
)
|
||||
self.api = habitipy
|
||||
self.content: dict[str, Any] = {}
|
||||
|
||||
async def _async_update_data(self) -> HabiticaData:
|
||||
try:
|
||||
user_response = await self.api.user.get()
|
||||
tasks_response = await self.api.tasks.user.get()
|
||||
tasks_response.extend(await self.api.tasks.user.get(type="completedTodos"))
|
||||
if not self.content:
|
||||
self.content = await self.api.content.get(
|
||||
language=user_response["preferences"]["language"]
|
||||
)
|
||||
except ClientResponseError as error:
|
||||
if error.status == HTTPStatus.TOO_MANY_REQUESTS:
|
||||
_LOGGER.debug("Currently rate limited, skipping update")
|
||||
_LOGGER.debug("Rate limit exceeded, will try again later")
|
||||
return self.data
|
||||
raise UpdateFailed(f"Error communicating with API: {error}") from error
|
||||
raise UpdateFailed(f"Unable to connect to Habitica: {error}") from error
|
||||
|
||||
return HabiticaData(user=user_response, tasks=tasks_response)
|
||||
|
||||
|
|
|
@@ -126,6 +126,18 @@
|
|||
},
|
||||
"rewards": {
|
||||
"default": "mdi:treasure-chest"
|
||||
},
|
||||
"strength": {
|
||||
"default": "mdi:arm-flex-outline"
|
||||
},
|
||||
"intelligence": {
|
||||
"default": "mdi:head-snowflake-outline"
|
||||
},
|
||||
"perception": {
|
||||
"default": "mdi:eye-outline"
|
||||
},
|
||||
"constitution": {
|
||||
"default": "mdi:run-fast"
|
||||
}
|
||||
},
|
||||
"switch": {
|
||||
|
@@ -151,6 +163,30 @@
|
|||
},
|
||||
"cast_skill": {
|
||||
"service": "mdi:creation-outline"
|
||||
},
|
||||
"accept_quest": {
|
||||
"service": "mdi:script-text"
|
||||
},
|
||||
"reject_quest": {
|
||||
"service": "mdi:script-text"
|
||||
},
|
||||
"leave_quest": {
|
||||
"service": "mdi:script-text"
|
||||
},
|
||||
"abort_quest": {
|
||||
"service": "mdi:script-text-key"
|
||||
},
|
||||
"cancel_quest": {
|
||||
"service": "mdi:script-text-key"
|
||||
},
|
||||
"start_quest": {
|
||||
"service": "mdi:script-text-key"
|
||||
},
|
||||
"score_habit": {
|
||||
"service": "mdi:counter"
|
||||
},
|
||||
"score_reward": {
|
||||
"service": "mdi:sack"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -27,7 +27,7 @@ from homeassistant.helpers.typing import StateType
|
|||
from .const import DOMAIN, UNIT_TASKS
|
||||
from .entity import HabiticaBase
|
||||
from .types import HabiticaConfigEntry
|
||||
from .util import entity_used_in
|
||||
from .util import entity_used_in, get_attribute_points, get_attributes_total
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
@@ -36,7 +36,10 @@ _LOGGER = logging.getLogger(__name__)
|
|||
class HabitipySensorEntityDescription(SensorEntityDescription):
|
||||
"""Habitipy Sensor Description."""
|
||||
|
||||
value_fn: Callable[[dict[str, Any]], StateType]
|
||||
value_fn: Callable[[dict[str, Any], dict[str, Any]], StateType]
|
||||
attributes_fn: (
|
||||
Callable[[dict[str, Any], dict[str, Any]], dict[str, Any] | None] | None
|
||||
) = None
|
||||
|
||||
|
||||
@dataclass(kw_only=True, frozen=True)
|
||||
|
@@ -65,76 +68,80 @@ class HabitipySensorEntity(StrEnum):
|
|||
REWARDS = "rewards"
|
||||
GEMS = "gems"
|
||||
TRINKETS = "trinkets"
|
||||
STRENGTH = "strength"
|
||||
INTELLIGENCE = "intelligence"
|
||||
CONSTITUTION = "constitution"
|
||||
PERCEPTION = "perception"
|
||||
|
||||
|
||||
SENSOR_DESCRIPTIONS: tuple[HabitipySensorEntityDescription, ...] = (
|
||||
HabitipySensorEntityDescription(
|
||||
key=HabitipySensorEntity.DISPLAY_NAME,
|
||||
translation_key=HabitipySensorEntity.DISPLAY_NAME,
|
||||
value_fn=lambda user: user.get("profile", {}).get("name"),
|
||||
value_fn=lambda user, _: user.get("profile", {}).get("name"),
|
||||
),
|
||||
HabitipySensorEntityDescription(
|
||||
key=HabitipySensorEntity.HEALTH,
|
||||
translation_key=HabitipySensorEntity.HEALTH,
|
||||
native_unit_of_measurement="HP",
|
||||
suggested_display_precision=0,
|
||||
value_fn=lambda user: user.get("stats", {}).get("hp"),
|
||||
value_fn=lambda user, _: user.get("stats", {}).get("hp"),
|
||||
),
|
||||
HabitipySensorEntityDescription(
|
||||
key=HabitipySensorEntity.HEALTH_MAX,
|
||||
translation_key=HabitipySensorEntity.HEALTH_MAX,
|
||||
native_unit_of_measurement="HP",
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda user: user.get("stats", {}).get("maxHealth"),
|
||||
value_fn=lambda user, _: user.get("stats", {}).get("maxHealth"),
|
||||
),
|
||||
HabitipySensorEntityDescription(
|
||||
key=HabitipySensorEntity.MANA,
|
||||
translation_key=HabitipySensorEntity.MANA,
|
||||
native_unit_of_measurement="MP",
|
||||
suggested_display_precision=0,
|
||||
value_fn=lambda user: user.get("stats", {}).get("mp"),
|
||||
value_fn=lambda user, _: user.get("stats", {}).get("mp"),
|
||||
),
|
||||
HabitipySensorEntityDescription(
|
||||
key=HabitipySensorEntity.MANA_MAX,
|
||||
translation_key=HabitipySensorEntity.MANA_MAX,
|
||||
native_unit_of_measurement="MP",
|
||||
value_fn=lambda user: user.get("stats", {}).get("maxMP"),
|
||||
value_fn=lambda user, _: user.get("stats", {}).get("maxMP"),
|
||||
),
|
||||
HabitipySensorEntityDescription(
|
||||
key=HabitipySensorEntity.EXPERIENCE,
|
||||
translation_key=HabitipySensorEntity.EXPERIENCE,
|
||||
native_unit_of_measurement="XP",
|
||||
value_fn=lambda user: user.get("stats", {}).get("exp"),
|
||||
value_fn=lambda user, _: user.get("stats", {}).get("exp"),
|
||||
),
|
||||
HabitipySensorEntityDescription(
|
||||
key=HabitipySensorEntity.EXPERIENCE_MAX,
|
||||
translation_key=HabitipySensorEntity.EXPERIENCE_MAX,
|
||||
native_unit_of_measurement="XP",
|
||||
value_fn=lambda user: user.get("stats", {}).get("toNextLevel"),
|
||||
value_fn=lambda user, _: user.get("stats", {}).get("toNextLevel"),
|
||||
),
|
||||
HabitipySensorEntityDescription(
|
||||
key=HabitipySensorEntity.LEVEL,
|
||||
translation_key=HabitipySensorEntity.LEVEL,
|
||||
value_fn=lambda user: user.get("stats", {}).get("lvl"),
|
||||
value_fn=lambda user, _: user.get("stats", {}).get("lvl"),
|
||||
),
|
||||
HabitipySensorEntityDescription(
|
||||
key=HabitipySensorEntity.GOLD,
|
||||
translation_key=HabitipySensorEntity.GOLD,
|
||||
native_unit_of_measurement="GP",
|
||||
suggested_display_precision=2,
|
||||
value_fn=lambda user: user.get("stats", {}).get("gp"),
|
||||
value_fn=lambda user, _: user.get("stats", {}).get("gp"),
|
||||
),
|
||||
HabitipySensorEntityDescription(
|
||||
key=HabitipySensorEntity.CLASS,
|
||||
translation_key=HabitipySensorEntity.CLASS,
|
||||
value_fn=lambda user: user.get("stats", {}).get("class"),
|
||||
value_fn=lambda user, _: user.get("stats", {}).get("class"),
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
options=["warrior", "healer", "wizard", "rogue"],
|
||||
),
|
||||
HabitipySensorEntityDescription(
|
||||
key=HabitipySensorEntity.GEMS,
|
||||
translation_key=HabitipySensorEntity.GEMS,
|
||||
value_fn=lambda user: user.get("balance", 0) * 4,
|
||||
value_fn=lambda user, _: user.get("balance", 0) * 4,
|
||||
suggested_display_precision=0,
|
||||
native_unit_of_measurement="gems",
|
||||
),
|
||||
|
@@ -142,7 +149,7 @@ SENSOR_DESCRIPTIONS: tuple[HabitipySensorEntityDescription, ...] = (
|
|||
key=HabitipySensorEntity.TRINKETS,
|
||||
translation_key=HabitipySensorEntity.TRINKETS,
|
||||
value_fn=(
|
||||
lambda user: user.get("purchased", {})
|
||||
lambda user, _: user.get("purchased", {})
|
||||
.get("plan", {})
|
||||
.get("consecutive", {})
|
||||
.get("trinkets", 0)
|
||||
|
@@ -150,6 +157,38 @@ SENSOR_DESCRIPTIONS: tuple[HabitipySensorEntityDescription, ...] = (
|
|||
suggested_display_precision=0,
|
||||
native_unit_of_measurement="⧖",
|
||||
),
|
||||
HabitipySensorEntityDescription(
|
||||
key=HabitipySensorEntity.STRENGTH,
|
||||
translation_key=HabitipySensorEntity.STRENGTH,
|
||||
value_fn=lambda user, content: get_attributes_total(user, content, "str"),
|
||||
attributes_fn=lambda user, content: get_attribute_points(user, content, "str"),
|
||||
suggested_display_precision=0,
|
||||
native_unit_of_measurement="STR",
|
||||
),
|
||||
HabitipySensorEntityDescription(
|
||||
key=HabitipySensorEntity.INTELLIGENCE,
|
||||
translation_key=HabitipySensorEntity.INTELLIGENCE,
|
||||
value_fn=lambda user, content: get_attributes_total(user, content, "int"),
|
||||
attributes_fn=lambda user, content: get_attribute_points(user, content, "int"),
|
||||
suggested_display_precision=0,
|
||||
native_unit_of_measurement="INT",
|
||||
),
|
||||
HabitipySensorEntityDescription(
|
||||
key=HabitipySensorEntity.PERCEPTION,
|
||||
translation_key=HabitipySensorEntity.PERCEPTION,
|
||||
value_fn=lambda user, content: get_attributes_total(user, content, "per"),
|
||||
attributes_fn=lambda user, content: get_attribute_points(user, content, "per"),
|
||||
suggested_display_precision=0,
|
||||
native_unit_of_measurement="PER",
|
||||
),
|
||||
HabitipySensorEntityDescription(
|
||||
key=HabitipySensorEntity.CONSTITUTION,
|
||||
translation_key=HabitipySensorEntity.CONSTITUTION,
|
||||
value_fn=lambda user, content: get_attributes_total(user, content, "con"),
|
||||
attributes_fn=lambda user, content: get_attribute_points(user, content, "con"),
|
||||
suggested_display_precision=0,
|
||||
native_unit_of_measurement="CON",
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
|
@@ -243,7 +282,16 @@ class HabitipySensor(HabiticaBase, SensorEntity):
|
|||
def native_value(self) -> StateType:
|
||||
"""Return the state of the device."""
|
||||
|
||||
return self.entity_description.value_fn(self.coordinator.data.user)
|
||||
return self.entity_description.value_fn(
|
||||
self.coordinator.data.user, self.coordinator.content
|
||||
)
|
||||
|
||||
@property
|
||||
def extra_state_attributes(self) -> dict[str, float | None] | None:
|
||||
"""Return entity specific state attributes."""
|
||||
if func := self.entity_description.attributes_fn:
|
||||
return func(self.coordinator.data.user, self.coordinator.content)
|
||||
return None
|
||||
|
||||
|
||||
class HabitipyTaskSensor(HabiticaBase, SensorEntity):
|
||||
|
|
|
@@ -19,19 +19,29 @@ from homeassistant.core import (
|
|||
)
|
||||
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
|
||||
from homeassistant.helpers.selector import ConfigEntrySelector
|
||||
|
||||
from .const import (
|
||||
ATTR_ARGS,
|
||||
ATTR_CONFIG_ENTRY,
|
||||
ATTR_DATA,
|
||||
ATTR_DIRECTION,
|
||||
ATTR_PATH,
|
||||
ATTR_SKILL,
|
||||
ATTR_TASK,
|
||||
DOMAIN,
|
||||
EVENT_API_CALL_SUCCESS,
|
||||
SERVICE_ABORT_QUEST,
|
||||
SERVICE_ACCEPT_QUEST,
|
||||
SERVICE_API_CALL,
|
||||
SERVICE_CANCEL_QUEST,
|
||||
SERVICE_CAST_SKILL,
|
||||
SERVICE_LEAVE_QUEST,
|
||||
SERVICE_REJECT_QUEST,
|
||||
SERVICE_SCORE_HABIT,
|
||||
SERVICE_SCORE_REWARD,
|
||||
SERVICE_START_QUEST,
|
||||
)
|
||||
from .types import HabiticaConfigEntry
|
||||
|
||||
|
@@ -54,6 +64,19 @@ SERVICE_CAST_SKILL_SCHEMA = vol.Schema(
|
|||
}
|
||||
)
|
||||
|
||||
SERVICE_MANAGE_QUEST_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector(),
|
||||
}
|
||||
)
|
||||
SERVICE_SCORE_TASK_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector(),
|
||||
vol.Required(ATTR_TASK): cv.string,
|
||||
vol.Optional(ATTR_DIRECTION): cv.string,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def get_config_entry(hass: HomeAssistant, entry_id: str) -> HabiticaConfigEntry:
|
||||
"""Return config entry or raise if not found or not loaded."""
|
||||
|
@@ -70,10 +93,23 @@ def get_config_entry(hass: HomeAssistant, entry_id: str) -> HabiticaConfigEntry:
|
|||
return entry
|
||||
|
||||
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
def async_setup_services(hass: HomeAssistant) -> None: # noqa: C901
|
||||
"""Set up services for Habitica integration."""
|
||||
|
||||
async def handle_api_call(call: ServiceCall) -> None:
|
||||
async_create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
"deprecated_api_call",
|
||||
breaks_in_ha_version="2025.6.0",
|
||||
is_fixable=False,
|
||||
severity=IssueSeverity.WARNING,
|
||||
translation_key="deprecated_api_call",
|
||||
)
|
||||
_LOGGER.warning(
|
||||
"Deprecated action called: 'habitica.api_call' is deprecated and will be removed in Home Assistant version 2025.6.0"
|
||||
)
|
||||
|
||||
name = call.data[ATTR_NAME]
|
||||
path = call.data[ATTR_PATH]
|
||||
entries = hass.config_entries.async_entries(DOMAIN)
|
||||
|
@@ -160,6 +196,104 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
|||
await coordinator.async_request_refresh()
|
||||
return response
|
||||
|
||||
async def manage_quests(call: ServiceCall) -> ServiceResponse:
|
||||
"""Accept, reject, start, leave or cancel quests."""
|
||||
entry = get_config_entry(hass, call.data[ATTR_CONFIG_ENTRY])
|
||||
coordinator = entry.runtime_data
|
||||
|
||||
COMMAND_MAP = {
|
||||
SERVICE_ABORT_QUEST: "abort",
|
||||
SERVICE_ACCEPT_QUEST: "accept",
|
||||
SERVICE_CANCEL_QUEST: "cancel",
|
||||
SERVICE_LEAVE_QUEST: "leave",
|
||||
SERVICE_REJECT_QUEST: "reject",
|
||||
SERVICE_START_QUEST: "force-start",
|
||||
}
|
||||
try:
|
||||
return await coordinator.api.groups.party.quests[
|
||||
COMMAND_MAP[call.service]
|
||||
].post()
|
||||
except ClientResponseError as e:
|
||||
if e.status == HTTPStatus.TOO_MANY_REQUESTS:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="setup_rate_limit_exception",
|
||||
) from e
|
||||
if e.status == HTTPStatus.UNAUTHORIZED:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN, translation_key="quest_action_unallowed"
|
||||
) from e
|
||||
if e.status == HTTPStatus.NOT_FOUND:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN, translation_key="quest_not_found"
|
||||
) from e
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN, translation_key="service_call_exception"
|
||||
) from e
|
||||
|
||||
for service in (
|
||||
SERVICE_ABORT_QUEST,
|
||||
SERVICE_ACCEPT_QUEST,
|
||||
SERVICE_CANCEL_QUEST,
|
||||
SERVICE_LEAVE_QUEST,
|
||||
SERVICE_REJECT_QUEST,
|
||||
SERVICE_START_QUEST,
|
||||
):
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
service,
|
||||
manage_quests,
|
||||
schema=SERVICE_MANAGE_QUEST_SCHEMA,
|
||||
supports_response=SupportsResponse.ONLY,
|
||||
)
|
||||
|
||||
async def score_task(call: ServiceCall) -> ServiceResponse:
|
||||
"""Score a task action."""
|
||||
entry = get_config_entry(hass, call.data[ATTR_CONFIG_ENTRY])
|
||||
coordinator = entry.runtime_data
|
||||
try:
|
||||
task_id, task_value = next(
|
||||
(task["id"], task.get("value"))
|
||||
for task in coordinator.data.tasks
|
||||
if call.data[ATTR_TASK] in (task["id"], task.get("alias"))
|
||||
or call.data[ATTR_TASK] == task["text"]
|
||||
)
|
||||
except StopIteration as e:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="task_not_found",
|
||||
translation_placeholders={"task": f"'{call.data[ATTR_TASK]}'"},
|
||||
) from e
|
||||
|
||||
try:
|
||||
response: dict[str, Any] = (
|
||||
await coordinator.api.tasks[task_id]
|
||||
.score[call.data.get(ATTR_DIRECTION, "up")]
|
||||
.post()
|
||||
)
|
||||
except ClientResponseError as e:
|
||||
if e.status == HTTPStatus.TOO_MANY_REQUESTS:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="setup_rate_limit_exception",
|
||||
) from e
|
||||
if e.status == HTTPStatus.UNAUTHORIZED and task_value is not None:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="not_enough_gold",
|
||||
translation_placeholders={
|
||||
"gold": f"{coordinator.data.user["stats"]["gp"]:.2f} GP",
|
||||
"cost": f"{task_value} GP",
|
||||
},
|
||||
) from e
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="service_call_exception",
|
||||
) from e
|
||||
else:
|
||||
await coordinator.async_request_refresh()
|
||||
return response
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SERVICE_API_CALL,
|
||||
|
@@ -174,3 +308,18 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
|||
schema=SERVICE_CAST_SKILL_SCHEMA,
|
||||
supports_response=SupportsResponse.ONLY,
|
||||
)
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SERVICE_SCORE_HABIT,
|
||||
score_task,
|
||||
schema=SERVICE_SCORE_TASK_SCHEMA,
|
||||
supports_response=SupportsResponse.ONLY,
|
||||
)
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SERVICE_SCORE_REWARD,
|
||||
score_task,
|
||||
schema=SERVICE_SCORE_TASK_SCHEMA,
|
||||
supports_response=SupportsResponse.ONLY,
|
||||
)
|
||||
|
|
|
@@ -17,7 +17,7 @@ api_call:
|
|||
object:
|
||||
cast_skill:
|
||||
fields:
|
||||
config_entry:
|
||||
config_entry: &config_entry
|
||||
required: true
|
||||
selector:
|
||||
config_entry:
|
||||
|
@@ -33,7 +33,42 @@ cast_skill:
|
|||
- "fireball"
|
||||
mode: dropdown
|
||||
translation_key: "skill_select"
|
||||
task:
|
||||
task: &task
|
||||
required: true
|
||||
selector:
|
||||
text:
|
||||
accept_quest:
|
||||
fields:
|
||||
config_entry: *config_entry
|
||||
reject_quest:
|
||||
fields:
|
||||
config_entry: *config_entry
|
||||
start_quest:
|
||||
fields:
|
||||
config_entry: *config_entry
|
||||
cancel_quest:
|
||||
fields:
|
||||
config_entry: *config_entry
|
||||
abort_quest:
|
||||
fields:
|
||||
config_entry: *config_entry
|
||||
leave_quest:
|
||||
fields:
|
||||
config_entry: *config_entry
|
||||
score_habit:
|
||||
fields:
|
||||
config_entry: *config_entry
|
||||
task: *task
|
||||
direction:
|
||||
required: true
|
||||
selector:
|
||||
select:
|
||||
options:
|
||||
- value: up
|
||||
label: "➕"
|
||||
- value: down
|
||||
label: "➖"
|
||||
score_reward:
|
||||
fields:
|
||||
config_entry: *config_entry
|
||||
task: *task
|
||||
|
|
|
@@ -1,7 +1,8 @@
|
|||
{
|
||||
"common": {
|
||||
"todos": "To-Do's",
|
||||
"dailies": "Dailies"
|
||||
"dailies": "Dailies",
|
||||
"config_entry_name": "Select character"
|
||||
},
|
||||
"config": {
|
||||
"abort": {
|
||||
|
@@ -164,6 +165,86 @@
|
|||
},
|
||||
"rewards": {
|
||||
"name": "Rewards"
|
||||
},
|
||||
"strength": {
|
||||
"name": "Strength",
|
||||
"state_attributes": {
|
||||
"level": {
|
||||
"name": "[%key:component::habitica::entity::sensor::level::name%]"
|
||||
},
|
||||
"equipment": {
|
||||
"name": "Battle gear"
|
||||
},
|
||||
"class": {
|
||||
"name": "Class equip bonus"
|
||||
},
|
||||
"allocated": {
|
||||
"name": "Allocated attribute points"
|
||||
},
|
||||
"buffs": {
|
||||
"name": "Buffs"
|
||||
}
|
||||
}
|
||||
},
|
||||
"intelligence": {
|
||||
"name": "Intelligence",
|
||||
"state_attributes": {
|
||||
"level": {
|
||||
"name": "[%key:component::habitica::entity::sensor::level::name%]"
|
||||
},
|
||||
"equipment": {
|
||||
"name": "[%key:component::habitica::entity::sensor::strength::state_attributes::equipment::name%]"
|
||||
},
|
||||
"class": {
|
||||
"name": "[%key:component::habitica::entity::sensor::strength::state_attributes::class::name%]"
|
||||
},
|
||||
"allocated": {
|
||||
"name": "[%key:component::habitica::entity::sensor::strength::state_attributes::allocated::name%]"
|
||||
},
|
||||
"buffs": {
|
||||
"name": "[%key:component::habitica::entity::sensor::strength::state_attributes::buffs::name%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
"perception": {
|
||||
"name": "Perception",
|
||||
"state_attributes": {
|
||||
"level": {
|
||||
"name": "[%key:component::habitica::entity::sensor::level::name%]"
|
||||
},
|
||||
"equipment": {
|
||||
"name": "[%key:component::habitica::entity::sensor::strength::state_attributes::equipment::name%]"
|
||||
},
|
||||
"class": {
|
||||
"name": "[%key:component::habitica::entity::sensor::strength::state_attributes::class::name%]"
|
||||
},
|
||||
"allocated": {
|
||||
"name": "[%key:component::habitica::entity::sensor::strength::state_attributes::allocated::name%]"
|
||||
},
|
||||
"buffs": {
|
||||
"name": "[%key:component::habitica::entity::sensor::strength::state_attributes::buffs::name%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
"constitution": {
|
||||
"name": "Constitution",
|
||||
"state_attributes": {
|
||||
"level": {
|
||||
"name": "[%key:component::habitica::entity::sensor::level::name%]"
|
||||
},
|
||||
"equipment": {
|
||||
"name": "[%key:component::habitica::entity::sensor::strength::state_attributes::equipment::name%]"
|
||||
},
|
||||
"class": {
|
||||
"name": "[%key:component::habitica::entity::sensor::strength::state_attributes::class::name%]"
|
||||
},
|
||||
"allocated": {
|
||||
"name": "[%key:component::habitica::entity::sensor::strength::state_attributes::allocated::name%]"
|
||||
},
|
||||
"buffs": {
|
||||
"name": "[%key:component::habitica::entity::sensor::strength::state_attributes::buffs::name%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"switch": {
|
||||
|
@@ -209,10 +290,10 @@
|
|||
"message": "Unable to create new to-do `{name}` for Habitica, please try again"
|
||||
},
|
||||
"setup_rate_limit_exception": {
|
||||
"message": "Currently rate limited, try again later"
|
||||
"message": "Rate limit exceeded, try again later"
|
||||
},
|
||||
"service_call_unallowed": {
|
||||
"message": "Unable to carry out this action, because the required conditions are not met"
|
||||
"message": "Unable to complete action, the required conditions are not met"
|
||||
},
|
||||
"service_call_exception": {
|
||||
"message": "Unable to connect to Habitica, try again later"
|
||||
|
@@ -220,6 +301,9 @@
|
|||
"not_enough_mana": {
|
||||
"message": "Unable to cast skill, not enough mana. Your character has {mana}, but the skill costs {cost}."
|
||||
},
|
||||
"not_enough_gold": {
|
||||
"message": "Unable to buy reward, not enough gold. Your character has {gold}, but the reward costs {cost}."
|
||||
},
|
||||
"skill_not_found": {
|
||||
"message": "Unable to cast skill, your character does not have the skill or spell {skill}."
|
||||
},
|
||||
|
@@ -230,13 +314,23 @@
|
|||
"message": "The selected character is currently not loaded or disabled in Home Assistant."
|
||||
},
|
||||
"task_not_found": {
|
||||
"message": "Unable to cast skill, could not find the task {task}"
|
||||
"message": "Unable to complete action, could not find the task {task}"
|
||||
},
|
||||
"quest_action_unallowed": {
|
||||
"message": "Action not allowed, only quest leader or group leader can perform this action"
|
||||
},
|
||||
"quest_not_found": {
|
||||
"message": "Unable to complete action, quest or group not found"
|
||||
}
|
||||
},
|
||||
"issues": {
|
||||
"deprecated_task_entity": {
|
||||
"title": "The Habitica {task_name} sensor is deprecated",
|
||||
"description": "The Habitica entity `{entity}` is deprecated and will be removed in a future release.\nPlease update your automations and scripts to replace the sensor entity with the newly added todo entity.\nWhen you are done migrating you can disable `{entity}`."
|
||||
},
|
||||
"deprecated_api_call": {
|
||||
"title": "The Habitica action habitica.api_call is deprecated",
|
||||
"description": "The Habitica action `habitica.api_call` is deprecated and will be removed in Home Assistant 2025.5.0.\n\nPlease update your automations and scripts to use other Habitica actions and entities."
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
|
@@ -263,7 +357,7 @@
|
|||
"description": "Use a skill or spell from your Habitica character on a specific task to affect its progress or status.",
|
||||
"fields": {
|
||||
"config_entry": {
|
||||
"name": "Select character",
|
||||
"name": "[%key:component::habitica::common::config_entry_name%]",
|
||||
"description": "Choose the Habitica character to cast the skill."
|
||||
},
|
||||
"skill": {
|
||||
|
@@ -275,6 +369,98 @@
|
|||
"description": "The name (or task ID) of the task you want to target with the skill or spell."
|
||||
}
|
||||
}
|
||||
},
|
||||
"accept_quest": {
|
||||
"name": "Accept a quest invitation",
|
||||
"description": "Accept a pending invitation to a quest.",
|
||||
"fields": {
|
||||
"config_entry": {
|
||||
"name": "[%key:component::habitica::common::config_entry_name%]",
|
||||
"description": "Choose the Habitica character for which to perform the action."
|
||||
}
|
||||
}
|
||||
},
|
||||
"reject_quest": {
|
||||
"name": "Reject a quest invitation",
|
||||
"description": "Reject a pending invitation to a quest.",
|
||||
"fields": {
|
||||
"config_entry": {
|
||||
"name": "[%key:component::habitica::common::config_entry_name%]",
|
||||
"description": "[%key:component::habitica::services::accept_quest::fields::config_entry::description%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
"leave_quest": {
|
||||
"name": "Leave a quest",
|
||||
"description": "Leave the current quest you are participating in.",
|
||||
"fields": {
|
||||
"config_entry": {
|
||||
"name": "[%key:component::habitica::common::config_entry_name%]",
|
||||
"description": "[%key:component::habitica::services::accept_quest::fields::config_entry::description%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
"abort_quest": {
|
||||
"name": "Abort an active quest",
|
||||
"description": "Terminate your party's ongoing quest. All progress will be lost and the quest roll returned to the owner's inventory. Only quest leader or group leader can perform this action.",
|
||||
"fields": {
|
||||
"config_entry": {
|
||||
"name": "[%key:component::habitica::common::config_entry_name%]",
|
||||
"description": "[%key:component::habitica::services::accept_quest::fields::config_entry::description%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
"cancel_quest": {
|
||||
"name": "Cancel a pending quest",
|
||||
"description": "Cancel a quest that has not yet startet. All accepted and pending invitations will be canceled and the quest roll returned to the owner's inventory. Only quest leader or group leader can perform this action.",
|
||||
"fields": {
|
||||
"config_entry": {
|
||||
"name": "[%key:component::habitica::common::config_entry_name%]",
|
||||
"description": "[%key:component::habitica::services::accept_quest::fields::config_entry::description%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
"start_quest": {
|
||||
"name": "Force-start a pending quest",
|
||||
"description": "Begin the quest immediately, bypassing any pending invitations that haven't been accepted or rejected. Only quest leader or group leader can perform this action.",
|
||||
"fields": {
|
||||
"config_entry": {
|
||||
"name": "[%key:component::habitica::common::config_entry_name%]",
|
||||
"description": "[%key:component::habitica::services::accept_quest::fields::config_entry::description%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
"score_habit": {
|
||||
"name": "Track a habit",
|
||||
"description": "Increase the positive or negative streak of a habit to track its progress.",
|
||||
"fields": {
|
||||
"config_entry": {
|
||||
"name": "[%key:component::habitica::common::config_entry_name%]",
|
||||
"description": "Select the Habitica character tracking your habit."
|
||||
},
|
||||
"task": {
|
||||
"name": "Habit name",
|
||||
"description": "The name (or task ID) of the Habitica habit."
|
||||
},
|
||||
"direction": {
|
||||
"name": "Reward or loss",
|
||||
"description": "Is it positive or negative progress you want to track for your habit."
|
||||
}
|
||||
}
|
||||
},
|
||||
"score_reward": {
|
||||
"name": "Buy a reward",
|
||||
"description": "Reward yourself and buy one of your custom rewards with gold earned by fulfilling tasks.",
|
||||
"fields": {
|
||||
"config_entry": {
|
||||
"name": "[%key:component::habitica::common::config_entry_name%]",
|
||||
"description": "Select the Habitica character buying the reward."
|
||||
},
|
||||
"task": {
|
||||
"name": "Reward name",
|
||||
"description": "The name (or task ID) of the custom reward."
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
|
|
|
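For orientation only (not part of the diff): a minimal sketch of how one of the new Habitica quest services could be invoked from Python inside Home Assistant. The domain, service name, and the config_entry field follow the strings above; the entry ID is a placeholder.

# Illustrative only: calling the new Habitica quest service from Python.
# "<config_entry_id>" is a placeholder for a real Habitica config entry ID.
await hass.services.async_call(
    "habitica",
    "accept_quest",
    {"config_entry": "<config_entry_id>"},
    blocking=True,
)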
@@ -3,6 +3,7 @@

from __future__ import annotations

import datetime
from math import floor
from typing import TYPE_CHECKING, Any

from dateutil.rrule import (

@@ -139,3 +140,52 @@ def get_recurrence_rule(recurrence: rrule) -> str:

    """
    return str(recurrence).split("RRULE:")[1]


def get_attribute_points(
    user: dict[str, Any], content: dict[str, Any], attribute: str
) -> dict[str, float]:
    """Get modifiers contributing to the given attribute."""

    gear_set = {
        "weapon",
        "armor",
        "head",
        "shield",
        "back",
        "headAccessory",
        "eyewear",
        "body",
    }

    equipment = sum(
        stats[attribute]
        for gear in gear_set
        if (equipped := user["items"]["gear"]["equipped"].get(gear))
        and (stats := content["gear"]["flat"].get(equipped))
    )

    class_bonus = sum(
        stats[attribute] / 2
        for gear in gear_set
        if (equipped := user["items"]["gear"]["equipped"].get(gear))
        and (stats := content["gear"]["flat"].get(equipped))
        and stats["klass"] == user["stats"]["class"]
    )

    return {
        "level": min(round(user["stats"]["lvl"] / 2), 50),
        "equipment": equipment,
        "class": class_bonus,
        "allocated": user["stats"][attribute],
        "buffs": user["stats"]["buffs"][attribute],
    }


def get_attributes_total(
    user: dict[str, Any], content: dict[str, Any], attribute: str
) -> int:
    """Get total attribute points."""
    return floor(
        sum(value for value in get_attribute_points(user, content, attribute).values())
    )
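To make the arithmetic concrete, here is a tiny hypothetical fixture run through these helpers. The user and content dictionaries are minimal stand-ins, not real Habitica API payloads.

# Hypothetical fixtures: a level-20 warrior with one matching piece of gear equipped.
user = {
    "stats": {"lvl": 20, "class": "warrior", "str": 5, "buffs": {"str": 2}},
    "items": {"gear": {"equipped": {"weapon": "weapon_warrior_1"}}},
}
content = {"gear": {"flat": {"weapon_warrior_1": {"str": 3, "klass": "warrior"}}}}

get_attribute_points(user, content, "str")
# -> {"level": 10, "equipment": 3, "class": 1.5, "allocated": 5, "buffs": 2}
get_attributes_total(user, content, "str")
# -> floor(10 + 3 + 1.5 + 5 + 2) == 21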
@@ -137,17 +137,3 @@ class SupervisorEntityModel(StrEnum):

    CORE = "Home Assistant Core"
    SUPERVIOSR = "Home Assistant Supervisor"
    HOST = "Home Assistant Host"


class SupervisorIssueContext(StrEnum):
    """Context for supervisor issues."""

    ADDON = "addon"
    CORE = "core"
    DNS_SERVER = "dns_server"
    MOUNT = "mount"
    OS = "os"
    PLUGIN = "plugin"
    SUPERVISOR = "supervisor"
    STORE = "store"
    SYSTEM = "system"
@@ -91,15 +91,6 @@ async def async_create_backup(

    return await hassio.send_command(command, payload=payload, timeout=None)


@bind_hass
@_api_bool
async def async_apply_suggestion(hass: HomeAssistant, suggestion_uuid: str) -> dict:
    """Apply a suggestion from supervisor's resolution center."""
    hassio: HassIO = hass.data[DOMAIN]
    command = f"/resolution/suggestion/{suggestion_uuid}"
    return await hassio.send_command(command, timeout=None)


@api_data
async def async_get_green_settings(hass: HomeAssistant) -> dict[str, bool]:
    """Return settings specific to Home Assistant Green."""

@@ -245,26 +236,6 @@ class HassIO:

        """
        return self.send_command("/ingress/panels", method="get")

    @api_data
    def get_resolution_info(self) -> Coroutine:
        """Return data for Supervisor resolution center.

        This method returns a coroutine.
        """
        return self.send_command("/resolution/info", method="get")

    @api_data
    def get_suggestions_for_issue(
        self, issue_id: str
    ) -> Coroutine[Any, Any, dict[str, Any]]:
        """Return suggestions for issue from Supervisor resolution center.

        This method returns a coroutine.
        """
        return self.send_command(
            f"/resolution/issue/{issue_id}/suggestions", method="get"
        )

    @_api_bool
    async def update_hass_api(
        self, http_config: dict[str, Any], refresh_token: RefreshToken

@@ -304,14 +275,6 @@ class HassIO:

            "/supervisor/options", payload={"diagnostics": diagnostics}
        )

    @_api_bool
    def apply_suggestion(self, suggestion_uuid: str) -> Coroutine:
        """Apply a suggestion from supervisor's resolution center.

        This method returns a coroutine.
        """
        return self.send_command(f"/resolution/suggestion/{suggestion_uuid}")

    async def send_command(
        self,
        command: str,
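For context, a minimal sketch of the call path that supersedes the removed resolution-center helpers. It is based on the get_supervisor_client helper and the aiohasupervisor resolution API used at the call sites later in this diff; the function name and import path here are assumptions, not the removed HassIO API.

# Sketch only; mirrors the usage in issues.py / repairs.py further down in this diff.
from uuid import UUID

from aiohasupervisor import SupervisorError

from homeassistant.core import HomeAssistant

from .handler import get_supervisor_client  # assumed import path inside the hassio component


async def apply_resolution_suggestion(hass: HomeAssistant, suggestion_uuid: UUID) -> bool:
    """Apply a resolution-center suggestion via the typed supervisor client."""
    client = get_supervisor_client(hass)
    try:
        await client.resolution.apply_suggestion(suggestion_uuid)
    except SupervisorError:
        return False
    return True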
@@ -7,6 +7,10 @@ from dataclasses import dataclass, field

from datetime import datetime
import logging
from typing import Any, NotRequired, TypedDict
from uuid import UUID

from aiohasupervisor import SupervisorError
from aiohasupervisor.models import ContextType, Issue as SupervisorIssue

from homeassistant.core import HassJob, HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect

@@ -20,12 +24,8 @@ from homeassistant.helpers.issue_registry import (

from .const import (
    ATTR_DATA,
    ATTR_HEALTHY,
    ATTR_ISSUES,
    ATTR_SUGGESTIONS,
    ATTR_SUPPORTED,
    ATTR_UNHEALTHY,
    ATTR_UNHEALTHY_REASONS,
    ATTR_UNSUPPORTED,
    ATTR_UNSUPPORTED_REASONS,
    ATTR_UPDATE_KEY,
    ATTR_WS_EVENT,

@@ -45,10 +45,9 @@ from .const import (

    PLACEHOLDER_KEY_REFERENCE,
    REQUEST_REFRESH_DELAY,
    UPDATE_KEY_SUPERVISOR,
    SupervisorIssueContext,
)
from .coordinator import get_addons_info
from .handler import HassIO, HassioAPIError
from .handler import HassIO, get_supervisor_client

ISSUE_KEY_UNHEALTHY = "unhealthy"
ISSUE_KEY_UNSUPPORTED = "unsupported"

@@ -120,9 +119,9 @@ class SuggestionDataType(TypedDict):

class Suggestion:
    """Suggestion from Supervisor which resolves an issue."""

    uuid: str
    uuid: UUID
    type: str
    context: SupervisorIssueContext
    context: ContextType
    reference: str | None = None

    @property

@@ -134,9 +133,9 @@ class Suggestion:

    def from_dict(cls, data: SuggestionDataType) -> Suggestion:
        """Convert from dictionary representation."""
        return cls(
            uuid=data["uuid"],
            uuid=UUID(data["uuid"]),
            type=data["type"],
            context=SupervisorIssueContext(data["context"]),
            context=ContextType(data["context"]),
            reference=data["reference"],
        )

@@ -155,9 +154,9 @@ class IssueDataType(TypedDict):

class Issue:
    """Issue from Supervisor."""

    uuid: str
    uuid: UUID
    type: str
    context: SupervisorIssueContext
    context: ContextType
    reference: str | None = None
    suggestions: list[Suggestion] = field(default_factory=list, compare=False)

@@ -171,9 +170,9 @@ class Issue:

        """Convert from dictionary representation."""
        suggestions: list[SuggestionDataType] = data.get("suggestions", [])
        return cls(
            uuid=data["uuid"],
            uuid=UUID(data["uuid"]),
            type=data["type"],
            context=SupervisorIssueContext(data["context"]),
            context=ContextType(data["context"]),
            reference=data["reference"],
            suggestions=[
                Suggestion.from_dict(suggestion) for suggestion in suggestions

@@ -190,7 +189,8 @@ class SupervisorIssues:

        self._client = client
        self._unsupported_reasons: set[str] = set()
        self._unhealthy_reasons: set[str] = set()
        self._issues: dict[str, Issue] = {}
        self._issues: dict[UUID, Issue] = {}
        self._supervisor_client = get_supervisor_client(hass)

    @property
    def unhealthy_reasons(self) -> set[str]:

@@ -283,7 +283,7 @@ class SupervisorIssues:

        async_create_issue(
            self._hass,
            DOMAIN,
            issue.uuid,
            issue.uuid.hex,
            is_fixable=bool(issue.suggestions),
            severity=IssueSeverity.WARNING,
            translation_key=issue.key,
@@ -292,19 +292,37 @@ class SupervisorIssues:

        self._issues[issue.uuid] = issue

    async def add_issue_from_data(self, data: IssueDataType) -> None:
    async def add_issue_from_data(self, data: SupervisorIssue) -> None:
        """Add issue from data to list after getting latest suggestions."""
        try:
            data["suggestions"] = (
                await self._client.get_suggestions_for_issue(data["uuid"])
            )[ATTR_SUGGESTIONS]
        except HassioAPIError:
            suggestions = (
                await self._supervisor_client.resolution.suggestions_for_issue(
                    data.uuid
                )
            )
        except SupervisorError:
            _LOGGER.error(
                "Could not get suggestions for supervisor issue %s, skipping it",
                data["uuid"],
                data.uuid.hex,
            )
            return
        self.add_issue(Issue.from_dict(data))
        self.add_issue(
            Issue(
                uuid=data.uuid,
                type=str(data.type),
                context=data.context,
                reference=data.reference,
                suggestions=[
                    Suggestion(
                        uuid=suggestion.uuid,
                        type=str(suggestion.type),
                        context=suggestion.context,
                        reference=suggestion.reference,
                    )
                    for suggestion in suggestions
                ],
            )
        )

    def remove_issue(self, issue: Issue) -> None:
        """Remove an issue from the list. Delete a repair if necessary."""

@@ -312,13 +330,13 @@ class SupervisorIssues:

            return

        if issue.key in ISSUE_KEYS_FOR_REPAIRS:
            async_delete_issue(self._hass, DOMAIN, issue.uuid)
            async_delete_issue(self._hass, DOMAIN, issue.uuid.hex)

        del self._issues[issue.uuid]

    def get_issue(self, issue_id: str) -> Issue | None:
        """Get issue from key."""
        return self._issues.get(issue_id)
        return self._issues.get(UUID(issue_id))

    async def setup(self) -> None:
        """Create supervisor events listener."""

@@ -331,8 +349,8 @@ class SupervisorIssues:

    async def _update(self, _: datetime | None = None) -> None:
        """Update issues from Supervisor resolution center."""
        try:
            data = await self._client.get_resolution_info()
        except HassioAPIError as err:
            data = await self._supervisor_client.resolution.info()
        except SupervisorError as err:
            _LOGGER.error("Failed to update supervisor issues: %r", err)
            async_call_later(
                self._hass,

@@ -340,18 +358,16 @@ class SupervisorIssues:

                HassJob(self._update, cancel_on_shutdown=True),
            )
            return
        self.unhealthy_reasons = set(data[ATTR_UNHEALTHY])
        self.unsupported_reasons = set(data[ATTR_UNSUPPORTED])
        self.unhealthy_reasons = set(data.unhealthy)
        self.unsupported_reasons = set(data.unsupported)

        # Remove any cached issues that weren't returned
        for issue_id in set(self._issues.keys()) - {
            issue["uuid"] for issue in data[ATTR_ISSUES]
        }:
        for issue_id in set(self._issues) - {issue.uuid for issue in data.issues}:
            self.remove_issue(self._issues[issue_id])

        # Add/update any issues that came back
        await asyncio.gather(
            *[self.add_issue_from_data(issue) for issue in data[ATTR_ISSUES]]
            *[self.add_issue_from_data(issue) for issue in data.issues]
        )

    @callback
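A small illustrative note on the UUID change above (the literal UUID below is hypothetical): the repairs registry now receives issue.uuid.hex, and get_issue converts the string back, so lookups against the UUID-keyed dict keep matching.

# Illustrative only: repairs issue IDs are now the hex form of the Supervisor UUID.
from uuid import UUID

issue_uuid = UUID("12345678-1234-5678-1234-567812345678")  # hypothetical value
issue_id = issue_uuid.hex            # "12345678123456781234567812345678"
assert UUID(issue_id) == issue_uuid  # get_issue() can rebuild the dict key from the string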
@@ -6,6 +6,8 @@ from collections.abc import Callable, Coroutine

from types import MethodType
from typing import Any

from aiohasupervisor import SupervisorError
from aiohasupervisor.models import ContextType
import voluptuous as vol

from homeassistant.components.repairs import RepairsFlow

@@ -20,9 +22,8 @@ from .const import (

    PLACEHOLDER_KEY_ADDON,
    PLACEHOLDER_KEY_COMPONENTS,
    PLACEHOLDER_KEY_REFERENCE,
    SupervisorIssueContext,
)
from .handler import async_apply_suggestion
from .handler import get_supervisor_client
from .issues import Issue, Suggestion

HELP_URLS = {

@@ -51,9 +52,10 @@ class SupervisorIssueRepairFlow(RepairsFlow):

    _data: dict[str, Any] | None = None
    _issue: Issue | None = None

    def __init__(self, issue_id: str) -> None:
    def __init__(self, hass: HomeAssistant, issue_id: str) -> None:
        """Initialize repair flow."""
        self._issue_id = issue_id
        self._supervisor_client = get_supervisor_client(hass)
        super().__init__()

    @property

@@ -124,9 +126,12 @@ class SupervisorIssueRepairFlow(RepairsFlow):

        if not confirmed and suggestion.key in SUGGESTION_CONFIRMATION_REQUIRED:
            return self._async_form_for_suggestion(suggestion)

        if await async_apply_suggestion(self.hass, suggestion.uuid):
            return self.async_create_entry(data={})
        return self.async_abort(reason="apply_suggestion_fail")
        try:
            await self._supervisor_client.resolution.apply_suggestion(suggestion.uuid)
        except SupervisorError:
            return self.async_abort(reason="apply_suggestion_fail")

        return self.async_create_entry(data={})

    @staticmethod
    def _async_step(

@@ -163,9 +168,9 @@ class DockerConfigIssueRepairFlow(SupervisorIssueRepairFlow):

            if issue.key == self.issue.key or issue.type != self.issue.type:
                continue

            if issue.context == SupervisorIssueContext.CORE:
            if issue.context == ContextType.CORE:
                components.insert(0, "Home Assistant")
            elif issue.context == SupervisorIssueContext.ADDON:
            elif issue.context == ContextType.ADDON:
                components.append(
                    next(
                        (

@@ -210,11 +215,11 @@ async def async_create_fix_flow(

    supervisor_issues = get_issues_info(hass)
    issue = supervisor_issues and supervisor_issues.get_issue(issue_id)
    if issue and issue.key == ISSUE_KEY_SYSTEM_DOCKER_CONFIG:
        return DockerConfigIssueRepairFlow(issue_id)
        return DockerConfigIssueRepairFlow(hass, issue_id)
    if issue and issue.key in {
        ISSUE_KEY_ADDON_DETACHED_ADDON_REMOVED,
        ISSUE_KEY_ADDON_BOOT_FAIL,
    }:
        return AddonIssueRepairFlow(issue_id)
        return AddonIssueRepairFlow(hass, issue_id)

    return SupervisorIssueRepairFlow(issue_id)
    return SupervisorIssueRepairFlow(hass, issue_id)
@@ -12,8 +12,13 @@ from homeassistant.components.binary_sensor import (

from homeassistant.components.script import scripts_with_entity
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
from homeassistant.helpers.issue_registry import (
    IssueSeverity,
    async_create_issue,
    async_delete_issue,
)

from .api import HomeConnectDevice
from .const import (

@@ -188,11 +193,32 @@ class HomeConnectDoorBinarySensor(HomeConnectBinarySensor):

    async def async_added_to_hass(self) -> None:
        """Call when entity is added to hass."""
        await super().async_added_to_hass()
        entity_automations = automations_with_entity(self.hass, self.entity_id)
        entity_scripts = scripts_with_entity(self.hass, self.entity_id)
        items = entity_automations + entity_scripts
        automations = automations_with_entity(self.hass, self.entity_id)
        scripts = scripts_with_entity(self.hass, self.entity_id)
        items = automations + scripts
        if not items:
            return

        entity_reg: er.EntityRegistry = er.async_get(self.hass)
        entity_automations = [
            automation_entity
            for automation_id in automations
            if (automation_entity := entity_reg.async_get(automation_id))
        ]
        entity_scripts = [
            script_entity
            for script_id in scripts
            if (script_entity := entity_reg.async_get(script_id))
        ]

        items_list = [
            f"- [{item.original_name}](/config/automation/edit/{item.unique_id})"
            for item in entity_automations
        ] + [
            f"- [{item.original_name}](/config/script/edit/{item.unique_id})"
            for item in entity_scripts
        ]

        async_create_issue(
            self.hass,
            DOMAIN,

@@ -203,6 +229,12 @@ class HomeConnectDoorBinarySensor(HomeConnectBinarySensor):

            translation_key="deprecated_binary_common_door_sensor",
            translation_placeholders={
                "entity": self.entity_id,
                "items": "\n".join([f"- {item}" for item in items]),
                "items": "\n".join(items_list),
            },
        )

    async def async_will_remove_from_hass(self) -> None:
        """Call when entity will be removed from hass."""
        async_delete_issue(
            self.hass, DOMAIN, f"deprecated_binary_common_door_sensor_{self.entity_id}"
        )
@@ -24,7 +24,6 @@ from homeassistant.config_entries import (

    ConfigFlow,
    ConfigFlowResult,
    OptionsFlow,
    OptionsFlowWithConfigEntry,
)
from homeassistant.core import callback
from homeassistant.data_entry_flow import AbortFlow

@@ -496,13 +495,15 @@ class BaseFirmwareConfigFlow(BaseFirmwareInstallFlow, ConfigFlow):

        return await self.async_step_pick_firmware()


class BaseFirmwareOptionsFlow(BaseFirmwareInstallFlow, OptionsFlowWithConfigEntry):
class BaseFirmwareOptionsFlow(BaseFirmwareInstallFlow, OptionsFlow):
    """Zigbee and Thread options flow handlers."""

    def __init__(self, *args: Any, **kwargs: Any) -> None:
    def __init__(self, config_entry: ConfigEntry, *args: Any, **kwargs: Any) -> None:
        """Instantiate options flow."""
        super().__init__(*args, **kwargs)

        self._config_entry = config_entry

        self._probed_firmware_type = ApplicationType(self.config_entry.data["firmware"])

        # Make `context` a regular dictionary
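A minimal sketch of the pattern this hunk moves to: subclass OptionsFlow directly and pass the config entry in explicitly rather than inheriting it from OptionsFlowWithConfigEntry. The class name below is illustrative, not part of the diff.

# Illustrative sketch only; mirrors the constructor change above.
from homeassistant.config_entries import ConfigEntry, OptionsFlow


class ExampleOptionsFlow(OptionsFlow):
    """Options flow that receives its config entry explicitly."""

    def __init__(self, config_entry: ConfigEntry) -> None:
        """Store the entry instead of relying on OptionsFlowWithConfigEntry."""
        self._config_entry = config_entry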
@@ -318,7 +318,6 @@ class OptionsFlowHandler(OptionsFlow, ABC):

        self.start_task: asyncio.Task | None = None
        self.stop_task: asyncio.Task | None = None
        self._zha_migration_mgr: ZhaMultiPANMigrationHelper | None = None
        self.config_entry = config_entry
        self.original_addon_config: dict[str, Any] | None = None
        self.revert_reason: str | None = None
@@ -18,6 +18,8 @@ from homeassistant.const import (

    SERVICE_ALARM_ARM_HOME,
    SERVICE_ALARM_ARM_NIGHT,
    SERVICE_ALARM_DISARM,
    STATE_UNAVAILABLE,
    STATE_UNKNOWN,
)
from homeassistant.core import State, callback

@@ -152,12 +154,12 @@ class SecuritySystem(HomeAccessory):

    @callback
    def async_update_state(self, new_state: State) -> None:
        """Update security state after state changed."""
        hass_state = None
        if new_state and new_state.state == "None":
            # Bail out early for no state
        hass_state: str | AlarmControlPanelState = new_state.state
        if hass_state in {"None", STATE_UNKNOWN, STATE_UNAVAILABLE}:
            # Bail out early for no state, unknown or unavailable
            return
        if new_state and new_state.state is not None:
            hass_state = AlarmControlPanelState(new_state.state)
        if hass_state is not None:
            hass_state = AlarmControlPanelState(hass_state)
        if (
            hass_state
            and (current_state := HASS_TO_HOMEKIT_CURRENT.get(hass_state)) is not None
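As a small aside on why the early bail-out above matters (illustrative, not part of the diff, and assuming AlarmControlPanelState is imported from the alarm_control_panel integration): constructing the state enum from a non-alarm state raises, so "None", unknown and unavailable have to be filtered out before the conversion.

# Illustrative only: AlarmControlPanelState is a StrEnum, so invalid values raise.
from homeassistant.components.alarm_control_panel import AlarmControlPanelState

AlarmControlPanelState("armed_home")   # AlarmControlPanelState.ARMED_HOME
AlarmControlPanelState("unavailable")  # raises ValueError, hence the early return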