Compare commits: dev...config_sub (13 commits)
Author | SHA1 | Date
---|---|---
 | 2462dfbc47 | 
 | 2246f86d4b | 
 | 02a5fed2e4 | 
 | 498773bad0 | 
 | b2922334aa | 
 | 1a06a2b6a6 | 
 | 55bf543648 | 
 | df13bed5b5 | 
 | a298091968 | 
 | 977c8311c9 | 
 | 40d1c164c4 | 
 | a3077513e4 | 
 | e15cf8b633 | 
595 changed files with 5444 additions and 16861 deletions
@@ -79,7 +79,6 @@ components: &components
  - homeassistant/components/group/**
  - homeassistant/components/hassio/**
  - homeassistant/components/homeassistant/**
  - homeassistant/components/homeassistant_hardware/**
  - homeassistant/components/http/**
  - homeassistant/components/image/**
  - homeassistant/components/input_boolean/**
.github/workflows/builder.yml (vendored): 2 changes

@@ -10,7 +10,7 @@ on:

env:
  BUILD_TYPE: core
  DEFAULT_PYTHON: "3.13"
  DEFAULT_PYTHON: "3.12"
  PIP_TIMEOUT: 60
  UV_HTTP_TIMEOUT: 60
  UV_SYSTEM_PYTHON: "true"
.github/workflows/ci.yaml (vendored): 14 changes

@@ -42,7 +42,7 @@ env:
  MYPY_CACHE_VERSION: 9
  HA_SHORT_VERSION: "2024.12"
  DEFAULT_PYTHON: "3.12"
  ALL_PYTHON_VERSIONS: "['3.12', '3.13']"
  ALL_PYTHON_VERSIONS: "['3.12']"
  # 10.3 is the oldest supported version
  # - 10.3.32 is the version currently shipped with Synology (as of 17 Feb 2022)
  # 10.6 is the current long-term-support

@@ -819,6 +819,10 @@ jobs:
    needs:
      - info
      - base
    strategy:
      fail-fast: false
      matrix:
        python-version: ${{ fromJson(needs.info.outputs.python_versions) }}
    name: Split tests for full run
    steps:
      - name: Install additional OS dependencies

@@ -832,11 +836,11 @@ jobs:
            libgammu-dev
      - name: Check out code from GitHub
        uses: actions/checkout@v4.2.2
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
      - name: Set up Python ${{ matrix.python-version }}
        id: python
        uses: actions/setup-python@v5.3.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          python-version: ${{ matrix.python-version }}
          check-latest: true
      - name: Restore base Python virtual environment
        id: cache-venv

@@ -854,7 +858,7 @@ jobs:
      - name: Upload pytest_buckets
        uses: actions/upload-artifact@v4.4.3
        with:
          name: pytest_buckets
          name: pytest_buckets-${{ matrix.python-version }}
          path: pytest_buckets.txt
          overwrite: true

@@ -919,7 +923,7 @@ jobs:
      - name: Download pytest_buckets
        uses: actions/download-artifact@v4.1.8
        with:
          name: pytest_buckets
          name: pytest_buckets-${{ matrix.python-version }}
      - name: Compile English translations
        run: |
          . venv/bin/activate
.github/workflows/codeql.yml (vendored): 4 changes

@@ -24,11 +24,11 @@ jobs:
        uses: actions/checkout@v4.2.2

      - name: Initialize CodeQL
        uses: github/codeql-action/init@v3.27.3
        uses: github/codeql-action/init@v3.27.0
        with:
          languages: python

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v3.27.3
        uses: github/codeql-action/analyze@v3.27.0
        with:
          category: "/language:python"
.github/workflows/wheels.yml (vendored): 30 changes

@@ -112,7 +112,7 @@ jobs:
    strategy:
      fail-fast: false
      matrix:
        abi: ["cp312", "cp313"]
        abi: ["cp312"]
        arch: ${{ fromJson(needs.init.outputs.architectures) }}
    steps:
      - name: Checkout the repository

@@ -135,14 +135,14 @@ jobs:
          sed -i "/uv/d" requirements_diff.txt

      - name: Build wheels
        uses: home-assistant/wheels@2024.11.0
        uses: home-assistant/wheels@2024.07.1
        with:
          abi: ${{ matrix.abi }}
          tag: musllinux_1_2
          arch: ${{ matrix.arch }}
          wheels-key: ${{ secrets.WHEELS_KEY }}
          env-file: true
          apk: "libffi-dev;openssl-dev;yaml-dev;nasm;zlib-dev"
          apk: "libffi-dev;openssl-dev;yaml-dev;nasm"
          skip-binary: aiohttp;multidict;yarl
          constraints: "homeassistant/package_constraints.txt"
          requirements-diff: "requirements_diff.txt"

@@ -156,7 +156,7 @@ jobs:
    strategy:
      fail-fast: false
      matrix:
        abi: ["cp312", "cp313"]
        abi: ["cp312"]
        arch: ${{ fromJson(needs.init.outputs.architectures) }}
    steps:
      - name: Checkout the repository

@@ -198,7 +198,6 @@ jobs:
          split -l $(expr $(expr $(cat requirements_all.txt | wc -l) + 1) / 3) requirements_all_wheels_${{ matrix.arch }}.txt requirements_all.txt

      - name: Create requirements for cython<3
        if: matrix.abi == 'cp312'
        run: |
          # Some dependencies still require 'cython<3'
          # and don't yet use isolated build environments.

@@ -209,8 +208,7 @@
          cat homeassistant/package_constraints.txt | grep 'pydantic==' >> requirements_old-cython.txt

      - name: Build wheels (old cython)
        uses: home-assistant/wheels@2024.11.0
        if: matrix.abi == 'cp312'
        uses: home-assistant/wheels@2024.07.1
        with:
          abi: ${{ matrix.abi }}
          tag: musllinux_1_2

@@ -225,43 +223,43 @@ jobs:
          pip: "'cython<3'"

      - name: Build wheels (part 1)
        uses: home-assistant/wheels@2024.11.0
        uses: home-assistant/wheels@2024.07.1
        with:
          abi: ${{ matrix.abi }}
          tag: musllinux_1_2
          arch: ${{ matrix.arch }}
          wheels-key: ${{ secrets.WHEELS_KEY }}
          env-file: true
          apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev"
          skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
          apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm"
          skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pydantic;pymicro-vad;yarl
          constraints: "homeassistant/package_constraints.txt"
          requirements-diff: "requirements_diff.txt"
          requirements: "requirements_all.txtaa"

      - name: Build wheels (part 2)
        uses: home-assistant/wheels@2024.11.0
        uses: home-assistant/wheels@2024.07.1
        with:
          abi: ${{ matrix.abi }}
          tag: musllinux_1_2
          arch: ${{ matrix.arch }}
          wheels-key: ${{ secrets.WHEELS_KEY }}
          env-file: true
          apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev"
          skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
          apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm"
          skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pydantic;pymicro-vad;yarl
          constraints: "homeassistant/package_constraints.txt"
          requirements-diff: "requirements_diff.txt"
          requirements: "requirements_all.txtab"

      - name: Build wheels (part 3)
        uses: home-assistant/wheels@2024.11.0
        uses: home-assistant/wheels@2024.07.1
        with:
          abi: ${{ matrix.abi }}
          tag: musllinux_1_2
          arch: ${{ matrix.arch }}
          wheels-key: ${{ secrets.WHEELS_KEY }}
          env-file: true
          apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev"
          skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
          apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm"
          skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pydantic;pymicro-vad;yarl
          constraints: "homeassistant/package_constraints.txt"
          requirements-diff: "requirements_diff.txt"
          requirements: "requirements_all.txtac"
@@ -1,6 +1,6 @@
repos:
  - repo: https://github.com/astral-sh/ruff-pre-commit
    rev: v0.7.3
    rev: v0.7.2
    hooks:
      - id: ruff
        args:

@@ -90,7 +90,7 @@ repos:
        pass_filenames: false
        language: script
        types: [text]
        files: ^(script/hassfest/metadata\.py|homeassistant/const\.py$|pyproject\.toml|homeassistant/components/go2rtc/const\.py)$
        files: ^(script/hassfest/metadata\.py|homeassistant/const\.py$|pyproject\.toml)$
      - id: hassfest-mypy-config
        name: hassfest-mypy-config
        entry: script/run-in-env.sh python3 -m script.hassfest -p mypy_config
@@ -330,7 +330,6 @@ homeassistant.components.mysensors.*
homeassistant.components.myuplink.*
homeassistant.components.nam.*
homeassistant.components.nanoleaf.*
homeassistant.components.nasweb.*
homeassistant.components.neato.*
homeassistant.components.nest.*
homeassistant.components.netatmo.*

@@ -340,7 +339,6 @@ homeassistant.components.nfandroidtv.*
homeassistant.components.nightscout.*
homeassistant.components.nissan_leaf.*
homeassistant.components.no_ip.*
homeassistant.components.nordpool.*
homeassistant.components.notify.*
homeassistant.components.notion.*
homeassistant.components.number.*
CODEOWNERS: 12 changes

@@ -40,8 +40,6 @@ build.json @home-assistant/supervisor
# Integrations
/homeassistant/components/abode/ @shred86
/tests/components/abode/ @shred86
/homeassistant/components/acaia/ @zweckj
/tests/components/acaia/ @zweckj
/homeassistant/components/accuweather/ @bieniu
/tests/components/accuweather/ @bieniu
/homeassistant/components/acmeda/ @atmurray

@@ -972,8 +970,6 @@ build.json @home-assistant/supervisor
/tests/components/nam/ @bieniu
/homeassistant/components/nanoleaf/ @milanmeu @joostlek
/tests/components/nanoleaf/ @milanmeu @joostlek
/homeassistant/components/nasweb/ @nasWebio
/tests/components/nasweb/ @nasWebio
/homeassistant/components/neato/ @Santobert
/tests/components/neato/ @Santobert
/homeassistant/components/nederlandse_spoorwegen/ @YarmoM

@@ -1014,8 +1010,6 @@ build.json @home-assistant/supervisor
/homeassistant/components/noaa_tides/ @jdelaney72
/homeassistant/components/nobo_hub/ @echoromeo @oyvindwe
/tests/components/nobo_hub/ @echoromeo @oyvindwe
/homeassistant/components/nordpool/ @gjohansson-ST
/tests/components/nordpool/ @gjohansson-ST
/homeassistant/components/notify/ @home-assistant/core
/tests/components/notify/ @home-assistant/core
/homeassistant/components/notify_events/ @matrozov @papajojo

@@ -1346,8 +1340,6 @@ build.json @home-assistant/supervisor
/tests/components/siren/ @home-assistant/core @raman325
/homeassistant/components/sisyphus/ @jkeljo
/homeassistant/components/sky_hub/ @rogerselwyn
/homeassistant/components/sky_remote/ @dunnmj @saty9
/tests/components/sky_remote/ @dunnmj @saty9
/homeassistant/components/skybell/ @tkdrob
/tests/components/skybell/ @tkdrob
/homeassistant/components/slack/ @tkdrob @fletcherau

@@ -1489,8 +1481,8 @@ build.json @home-assistant/supervisor
/tests/components/tedee/ @patrickhilker @zweckj
/homeassistant/components/tellduslive/ @fredrike
/tests/components/tellduslive/ @fredrike
/homeassistant/components/template/ @PhracturedBlue @home-assistant/core
/tests/components/template/ @PhracturedBlue @home-assistant/core
/homeassistant/components/template/ @PhracturedBlue @tetienne @home-assistant/core
/tests/components/template/ @PhracturedBlue @tetienne @home-assistant/core
/homeassistant/components/tesla_fleet/ @Bre77
/tests/components/tesla_fleet/ @Bre77
/homeassistant/components/tesla_wall_connector/ @einarhauks
@@ -13,7 +13,7 @@ ENV \
ARG QEMU_CPU

# Install uv
RUN pip3 install uv==0.5.0
RUN pip3 install uv==0.4.28

WORKDIR /usr/src

@@ -55,7 +55,7 @@ RUN \
        "armv7") go2rtc_suffix='arm' ;; \
        *) go2rtc_suffix=${BUILD_ARCH} ;; \
    esac \
    && curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.7/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
    && curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.6/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
    && chmod +x /bin/go2rtc \
    # Verify go2rtc can be executed
    && go2rtc --version
@@ -35,9 +35,6 @@ RUN \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/*

# Add go2rtc binary
COPY --from=ghcr.io/alexxit/go2rtc:latest /usr/local/bin/go2rtc /bin/go2rtc

# Install uv
RUN pip3 install uv
build.yaml: 10 changes

@@ -1,10 +1,10 @@
image: ghcr.io/home-assistant/{arch}-homeassistant
build_from:
  aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2024.11.0
  armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2024.11.0
  armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2024.11.0
  amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2024.11.0
  i386: ghcr.io/home-assistant/i386-homeassistant-base:2024.11.0
  aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2024.06.1
  armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2024.06.1
  armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2024.06.1
  amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2024.06.1
  i386: ghcr.io/home-assistant/i386-homeassistant-base:2024.06.1
codenotary:
  signer: notary@home-assistant.io
  base_image: notary@home-assistant.io
@@ -515,7 +515,7 @@ async def async_from_config_dict(
        issue_registry.async_create_issue(
            hass,
            core.DOMAIN,
            f"python_version_{required_python_version}",
            "python_version",
            is_fixable=False,
            severity=issue_registry.IssueSeverity.WARNING,
            breaks_in_ha_version=REQUIRED_NEXT_PYTHON_HA_RELEASE,
@@ -1,5 +0,0 @@
{
  "domain": "sky",
  "name": "Sky",
  "integrations": ["sky_hub", "sky_remote"]
}
@@ -1,29 +0,0 @@
"""Initialize the Acaia component."""

from homeassistant.const import Platform
from homeassistant.core import HomeAssistant

from .coordinator import AcaiaConfigEntry, AcaiaCoordinator

PLATFORMS = [
    Platform.BUTTON,
]


async def async_setup_entry(hass: HomeAssistant, entry: AcaiaConfigEntry) -> bool:
    """Set up acaia as config entry."""

    coordinator = AcaiaCoordinator(hass, entry)
    await coordinator.async_config_entry_first_refresh()

    entry.runtime_data = coordinator

    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

    return True


async def async_unload_entry(hass: HomeAssistant, entry: AcaiaConfigEntry) -> bool:
    """Unload a config entry."""

    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
@@ -1,61 +0,0 @@
"""Button entities for Acaia scales."""

from collections.abc import Callable, Coroutine
from dataclasses import dataclass
from typing import Any

from aioacaia.acaiascale import AcaiaScale

from homeassistant.components.button import ButtonEntity, ButtonEntityDescription
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from .coordinator import AcaiaConfigEntry
from .entity import AcaiaEntity


@dataclass(kw_only=True, frozen=True)
class AcaiaButtonEntityDescription(ButtonEntityDescription):
    """Description for acaia button entities."""

    press_fn: Callable[[AcaiaScale], Coroutine[Any, Any, None]]


BUTTONS: tuple[AcaiaButtonEntityDescription, ...] = (
    AcaiaButtonEntityDescription(
        key="tare",
        translation_key="tare",
        press_fn=lambda scale: scale.tare(),
    ),
    AcaiaButtonEntityDescription(
        key="reset_timer",
        translation_key="reset_timer",
        press_fn=lambda scale: scale.reset_timer(),
    ),
    AcaiaButtonEntityDescription(
        key="start_stop",
        translation_key="start_stop",
        press_fn=lambda scale: scale.start_stop_timer(),
    ),
)


async def async_setup_entry(
    hass: HomeAssistant,
    entry: AcaiaConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up button entities and services."""

    coordinator = entry.runtime_data
    async_add_entities(AcaiaButton(coordinator, description) for description in BUTTONS)


class AcaiaButton(AcaiaEntity, ButtonEntity):
    """Representation of an Acaia button."""

    entity_description: AcaiaButtonEntityDescription

    async def async_press(self) -> None:
        """Handle the button press."""
        await self.entity_description.press_fn(self._scale)
@@ -1,149 +0,0 @@
"""Config flow for Acaia integration."""

import logging
from typing import Any

from aioacaia.exceptions import AcaiaDeviceNotFound, AcaiaError, AcaiaUnknownDevice
from aioacaia.helpers import is_new_scale
import voluptuous as vol

from homeassistant.components.bluetooth import (
    BluetoothServiceInfoBleak,
    async_discovered_service_info,
)
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_ADDRESS, CONF_NAME
from homeassistant.helpers.device_registry import format_mac
from homeassistant.helpers.selector import (
    SelectOptionDict,
    SelectSelector,
    SelectSelectorConfig,
    SelectSelectorMode,
)

from .const import CONF_IS_NEW_STYLE_SCALE, DOMAIN

_LOGGER = logging.getLogger(__name__)


class AcaiaConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for acaia."""

    def __init__(self) -> None:
        """Initialize the config flow."""
        self._discovered: dict[str, Any] = {}
        self._discovered_devices: dict[str, str] = {}

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle a flow initialized by the user."""

        errors: dict[str, str] = {}

        if user_input is not None:
            mac = format_mac(user_input[CONF_ADDRESS])
            try:
                is_new_style_scale = await is_new_scale(mac)
            except AcaiaDeviceNotFound:
                errors["base"] = "device_not_found"
            except AcaiaError:
                _LOGGER.exception("Error occurred while connecting to the scale")
                errors["base"] = "unknown"
            except AcaiaUnknownDevice:
                return self.async_abort(reason="unsupported_device")
            else:
                await self.async_set_unique_id(mac)
                self._abort_if_unique_id_configured()

            if not errors:
                return self.async_create_entry(
                    title=self._discovered_devices[user_input[CONF_ADDRESS]],
                    data={
                        CONF_ADDRESS: mac,
                        CONF_IS_NEW_STYLE_SCALE: is_new_style_scale,
                    },
                )

        for device in async_discovered_service_info(self.hass):
            self._discovered_devices[device.address] = device.name

        if not self._discovered_devices:
            return self.async_abort(reason="no_devices_found")

        options = [
            SelectOptionDict(
                value=device_mac,
                label=f"{device_name} ({device_mac})",
            )
            for device_mac, device_name in self._discovered_devices.items()
        ]

        return self.async_show_form(
            step_id="user",
            data_schema=vol.Schema(
                {
                    vol.Required(CONF_ADDRESS): SelectSelector(
                        SelectSelectorConfig(
                            options=options,
                            mode=SelectSelectorMode.DROPDOWN,
                        )
                    )
                }
            ),
            errors=errors,
        )

    async def async_step_bluetooth(
        self, discovery_info: BluetoothServiceInfoBleak
    ) -> ConfigFlowResult:
        """Handle a discovered Bluetooth device."""

        self._discovered[CONF_ADDRESS] = mac = format_mac(discovery_info.address)
        self._discovered[CONF_NAME] = discovery_info.name

        await self.async_set_unique_id(mac)
        self._abort_if_unique_id_configured()

        try:
            self._discovered[CONF_IS_NEW_STYLE_SCALE] = await is_new_scale(
                discovery_info.address
            )
        except AcaiaDeviceNotFound:
            _LOGGER.debug("Device not found during discovery")
            return self.async_abort(reason="device_not_found")
        except AcaiaError:
            _LOGGER.debug(
                "Error occurred while connecting to the scale during discovery",
                exc_info=True,
            )
            return self.async_abort(reason="unknown")
        except AcaiaUnknownDevice:
            _LOGGER.debug("Unsupported device during discovery")
            return self.async_abort(reason="unsupported_device")

        return await self.async_step_bluetooth_confirm()

    async def async_step_bluetooth_confirm(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle confirmation of Bluetooth discovery."""

        if user_input is not None:
            return self.async_create_entry(
                title=self._discovered[CONF_NAME],
                data={
                    CONF_ADDRESS: self._discovered[CONF_ADDRESS],
                    CONF_IS_NEW_STYLE_SCALE: self._discovered[CONF_IS_NEW_STYLE_SCALE],
                },
            )

        self.context["title_placeholders"] = placeholders = {
            CONF_NAME: self._discovered[CONF_NAME]
        }

        self._set_confirm_only()
        return self.async_show_form(
            step_id="bluetooth_confirm",
            description_placeholders=placeholders,
        )
@@ -1,4 +0,0 @@
"""Constants for component."""

DOMAIN = "acaia"
CONF_IS_NEW_STYLE_SCALE = "is_new_style_scale"
@@ -1,86 +0,0 @@
"""Coordinator for Acaia integration."""

from __future__ import annotations

from datetime import timedelta
import logging

from aioacaia.acaiascale import AcaiaScale
from aioacaia.exceptions import AcaiaDeviceNotFound, AcaiaError

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ADDRESS
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

from .const import CONF_IS_NEW_STYLE_SCALE

SCAN_INTERVAL = timedelta(seconds=15)

_LOGGER = logging.getLogger(__name__)

type AcaiaConfigEntry = ConfigEntry[AcaiaCoordinator]


class AcaiaCoordinator(DataUpdateCoordinator[None]):
    """Class to handle fetching data from the scale."""

    config_entry: AcaiaConfigEntry

    def __init__(self, hass: HomeAssistant, entry: AcaiaConfigEntry) -> None:
        """Initialize coordinator."""
        super().__init__(
            hass,
            _LOGGER,
            name="acaia coordinator",
            update_interval=SCAN_INTERVAL,
            config_entry=entry,
        )

        self._scale = AcaiaScale(
            address_or_ble_device=entry.data[CONF_ADDRESS],
            name=entry.title,
            is_new_style_scale=entry.data[CONF_IS_NEW_STYLE_SCALE],
            notify_callback=self.async_update_listeners,
        )

    @property
    def scale(self) -> AcaiaScale:
        """Return the scale object."""
        return self._scale

    async def _async_update_data(self) -> None:
        """Fetch data."""

        # scale is already connected, return
        if self._scale.connected:
            return

        # scale is not connected, try to connect
        try:
            await self._scale.connect(setup_tasks=False)
        except (AcaiaDeviceNotFound, AcaiaError, TimeoutError) as ex:
            _LOGGER.debug(
                "Could not connect to scale: %s, Error: %s",
                self.config_entry.data[CONF_ADDRESS],
                ex,
            )
            self._scale.device_disconnected_handler(notify=False)
            return

        # connected, set up background tasks
        if not self._scale.heartbeat_task or self._scale.heartbeat_task.done():
            self._scale.heartbeat_task = self.config_entry.async_create_background_task(
                hass=self.hass,
                target=self._scale.send_heartbeats(),
                name="acaia_heartbeat_task",
            )

        if not self._scale.process_queue_task or self._scale.process_queue_task.done():
            self._scale.process_queue_task = (
                self.config_entry.async_create_background_task(
                    hass=self.hass,
                    target=self._scale.process_queue(),
                    name="acaia_process_queue_task",
                )
            )
@@ -1,40 +0,0 @@
"""Base class for Acaia entities."""

from dataclasses import dataclass

from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import DOMAIN
from .coordinator import AcaiaCoordinator


@dataclass
class AcaiaEntity(CoordinatorEntity[AcaiaCoordinator]):
    """Common elements for all entities."""

    _attr_has_entity_name = True

    def __init__(
        self,
        coordinator: AcaiaCoordinator,
        entity_description: EntityDescription,
    ) -> None:
        """Initialize the entity."""
        super().__init__(coordinator)
        self.entity_description = entity_description
        self._scale = coordinator.scale
        self._attr_unique_id = f"{self._scale.mac}_{entity_description.key}"

        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, self._scale.mac)},
            manufacturer="Acaia",
            model=self._scale.model,
            suggested_area="Kitchen",
        )

    @property
    def available(self) -> bool:
        """Returns whether entity is available."""
        return super().available and self._scale.connected
@@ -1,15 +0,0 @@
{
  "entity": {
    "button": {
      "tare": {
        "default": "mdi:scale-balance"
      },
      "reset_timer": {
        "default": "mdi:timer-refresh"
      },
      "start_stop": {
        "default": "mdi:timer-play"
      }
    }
  }
}
@@ -1,29 +0,0 @@
{
  "domain": "acaia",
  "name": "Acaia",
  "bluetooth": [
    {
      "manufacturer_id": 16962
    },
    {
      "local_name": "ACAIA*"
    },
    {
      "local_name": "PYXIS-*"
    },
    {
      "local_name": "LUNAR-*"
    },
    {
      "local_name": "PROCHBT001"
    }
  ],
  "codeowners": ["@zweckj"],
  "config_flow": true,
  "dependencies": ["bluetooth_adapters"],
  "documentation": "https://www.home-assistant.io/integrations/acaia",
  "integration_type": "device",
  "iot_class": "local_push",
  "loggers": ["aioacaia"],
  "requirements": ["aioacaia==0.1.6"]
}
@@ -1,38 +0,0 @@
{
  "config": {
    "flow_title": "{name}",
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
      "no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]",
      "unsupported_device": "This device is not supported."
    },
    "error": {
      "device_not_found": "Device could not be found.",
      "unknown": "[%key:common::config_flow::error::unknown%]"
    },
    "step": {
      "bluetooth_confirm": {
        "description": "[%key:component::bluetooth::config::step::bluetooth_confirm::description%]"
      },
      "user": {
        "description": "[%key:component::bluetooth::config::step::user::description%]",
        "data": {
          "address": "[%key:common::config_flow::data::device%]"
        }
      }
    }
  },
  "entity": {
    "button": {
      "tare": {
        "name": "Tare"
      },
      "reset_timer": {
        "name": "Reset timer"
      },
      "start_stop": {
        "name": "Start/stop timer"
      }
    }
  }
}
@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/agent_dvr",
  "iot_class": "local_polling",
  "loggers": ["agent"],
  "requirements": ["agent-py==0.0.24"]
  "requirements": ["agent-py==0.0.23"]
}
@@ -11,5 +11,5 @@
  "documentation": "https://www.home-assistant.io/integrations/airzone",
  "iot_class": "local_polling",
  "loggers": ["aioairzone"],
  "requirements": ["aioairzone==0.9.6"]
  "requirements": ["aioairzone==0.9.5"]
}
@@ -6,7 +6,7 @@ import asyncio
from datetime import timedelta
from functools import partial
import logging
from typing import TYPE_CHECKING, Any, Final, final
from typing import Any, Final, final

from propcache import cached_property
import voluptuous as vol

@@ -221,15 +221,9 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A
    @property
    def state(self) -> str | None:
        """Return the current state."""
        if (alarm_state := self.alarm_state) is not None:
            return alarm_state
        if self._attr_state is not None:
            # Backwards compatibility for integrations that set state directly
            # Should be removed in 2025.11
            if TYPE_CHECKING:
                assert isinstance(self._attr_state, str)
            return self._attr_state
        return None
        if (alarm_state := self.alarm_state) is None:
            return None
        return alarm_state

    @cached_property
    def alarm_state(self) -> AlarmControlPanelState | None:
@@ -32,9 +32,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:

    async def async_handle_create_service(call: ServiceCall) -> None:
        """Service handler for creating backups."""
        await backup_manager.async_create_backup(on_progress=None)
        if backup_task := backup_manager.backup_task:
            await backup_task
        await backup_manager.async_create_backup()

    hass.services.async_register(DOMAIN, "create", async_handle_create_service)
@@ -2,26 +2,23 @@

from __future__ import annotations

import asyncio
from http import HTTPStatus
from typing import cast

from aiohttp import BodyPartReader
from aiohttp.hdrs import CONTENT_DISPOSITION
from aiohttp.web import FileResponse, Request, Response

from homeassistant.components.http import KEY_HASS, HomeAssistantView, require_admin
from homeassistant.components.http import KEY_HASS, HomeAssistantView
from homeassistant.core import HomeAssistant, callback
from homeassistant.util import slugify

from .const import DATA_MANAGER
from .const import DOMAIN
from .manager import BaseBackupManager


@callback
def async_register_http_views(hass: HomeAssistant) -> None:
    """Register the http views."""
    hass.http.register_view(DownloadBackupView)
    hass.http.register_view(UploadBackupView)


class DownloadBackupView(HomeAssistantView):

@@ -39,7 +36,7 @@ class DownloadBackupView(HomeAssistantView):
        if not request["hass_user"].is_admin:
            return Response(status=HTTPStatus.UNAUTHORIZED)

        manager = request.app[KEY_HASS].data[DATA_MANAGER]
        manager: BaseBackupManager = request.app[KEY_HASS].data[DOMAIN]
        backup = await manager.async_get_backup(slug=slug)

        if backup is None or not backup.path.exists():

@@ -51,29 +48,3 @@ class DownloadBackupView(HomeAssistantView):
                CONTENT_DISPOSITION: f"attachment; filename={slugify(backup.name)}.tar"
            },
        )


class UploadBackupView(HomeAssistantView):
    """Generate backup view."""

    url = "/api/backup/upload"
    name = "api:backup:upload"

    @require_admin
    async def post(self, request: Request) -> Response:
        """Upload a backup file."""
        manager = request.app[KEY_HASS].data[DATA_MANAGER]
        reader = await request.multipart()
        contents = cast(BodyPartReader, await reader.next())

        try:
            await manager.async_receive_backup(contents=contents)
        except OSError as err:
            return Response(
                body=f"Can't write backup file {err}",
                status=HTTPStatus.INTERNAL_SERVER_ERROR,
            )
        except asyncio.CancelledError:
            return Response(status=HTTPStatus.INTERNAL_SERVER_ERROR)

        return Response(status=HTTPStatus.CREATED)
@@ -4,21 +4,16 @@ from __future__ import annotations

import abc
import asyncio
from collections.abc import Callable
from dataclasses import asdict, dataclass
import hashlib
import io
import json
from pathlib import Path
from queue import SimpleQueue
import shutil
import tarfile
from tarfile import TarError
from tempfile import TemporaryDirectory
import time
from typing import Any, Protocol, cast

import aiohttp
from securetar import SecureTarFile, atomic_contents_add

from homeassistant.backup_restore import RESTORE_BACKUP_FILE

@@ -35,13 +30,6 @@ from .const import DOMAIN, EXCLUDE_FROM_BACKUP, LOGGER
BUF_SIZE = 2**20 * 4 # 4MB


@dataclass(slots=True)
class NewBackup:
    """New backup class."""

    slug: str


@dataclass(slots=True)
class Backup:
    """Backup class."""

@@ -57,15 +45,6 @@ class Backup:
        return {**asdict(self), "path": self.path.as_posix()}


@dataclass(slots=True)
class BackupProgress:
    """Backup progress class."""

    done: bool
    stage: str | None
    success: bool | None


class BackupPlatformProtocol(Protocol):
    """Define the format that backup platforms can have."""

@@ -82,7 +61,7 @@ class BaseBackupManager(abc.ABC):
    def __init__(self, hass: HomeAssistant) -> None:
        """Initialize the backup manager."""
        self.hass = hass
        self.backup_task: asyncio.Task | None = None
        self.backing_up = False
        self.backups: dict[str, Backup] = {}
        self.loaded_platforms = False
        self.platforms: dict[str, BackupPlatformProtocol] = {}

@@ -147,15 +126,10 @@
    @abc.abstractmethod
    async def async_restore_backup(self, slug: str, **kwargs: Any) -> None:
        """Restore a backup."""
        """Restpre a backup."""

    @abc.abstractmethod
    async def async_create_backup(
        self,
        *,
        on_progress: Callable[[BackupProgress], None] | None,
        **kwargs: Any,
    ) -> NewBackup:
    async def async_create_backup(self, **kwargs: Any) -> Backup:
        """Generate a backup."""

    @abc.abstractmethod

@@ -173,15 +147,6 @@
    async def async_remove_backup(self, *, slug: str, **kwargs: Any) -> None:
        """Remove a backup."""

    @abc.abstractmethod
    async def async_receive_backup(
        self,
        *,
        contents: aiohttp.BodyPartReader,
        **kwargs: Any,
    ) -> None:
        """Receive and store a backup file from upload."""


class BackupManager(BaseBackupManager):
    """Backup manager for the Backup integration."""

@@ -257,93 +222,17 @@ class BackupManager(BaseBackupManager):
        LOGGER.debug("Removed backup located at %s", backup.path)
        self.backups.pop(slug)

    async def async_receive_backup(
        self,
        *,
        contents: aiohttp.BodyPartReader,
        **kwargs: Any,
    ) -> None:
        """Receive and store a backup file from upload."""
        queue: SimpleQueue[tuple[bytes, asyncio.Future[None] | None] | None] = (
            SimpleQueue()
        )
        temp_dir_handler = await self.hass.async_add_executor_job(TemporaryDirectory)
        target_temp_file = Path(
            temp_dir_handler.name, contents.filename or "backup.tar"
        )

        def _sync_queue_consumer() -> None:
            with target_temp_file.open("wb") as file_handle:
                while True:
                    if (_chunk_future := queue.get()) is None:
                        break
                    _chunk, _future = _chunk_future
                    if _future is not None:
                        self.hass.loop.call_soon_threadsafe(_future.set_result, None)
                    file_handle.write(_chunk)

        fut: asyncio.Future[None] | None = None
        try:
            fut = self.hass.async_add_executor_job(_sync_queue_consumer)
            megabytes_sending = 0
            while chunk := await contents.read_chunk(BUF_SIZE):
                megabytes_sending += 1
                if megabytes_sending % 5 != 0:
                    queue.put_nowait((chunk, None))
                    continue

                chunk_future = self.hass.loop.create_future()
                queue.put_nowait((chunk, chunk_future))
                await asyncio.wait(
                    (fut, chunk_future),
                    return_when=asyncio.FIRST_COMPLETED,
                )
                if fut.done():
                    # The executor job failed
                    break

            queue.put_nowait(None) # terminate queue consumer
        finally:
            if fut is not None:
                await fut

        def _move_and_cleanup() -> None:
            shutil.move(target_temp_file, self.backup_dir / target_temp_file.name)
            temp_dir_handler.cleanup()

        await self.hass.async_add_executor_job(_move_and_cleanup)
        await self.load_backups()

    async def async_create_backup(
        self,
        *,
        on_progress: Callable[[BackupProgress], None] | None,
        **kwargs: Any,
    ) -> NewBackup:
    async def async_create_backup(self, **kwargs: Any) -> Backup:
        """Generate a backup."""
        if self.backup_task:
        if self.backing_up:
            raise HomeAssistantError("Backup already in progress")
        backup_name = f"Core {HAVERSION}"
        date_str = dt_util.now().isoformat()
        slug = _generate_slug(date_str, backup_name)
        self.backup_task = self.hass.async_create_task(
            self._async_create_backup(backup_name, date_str, slug, on_progress),
            name="backup_manager_create_backup",
            eager_start=False, # To ensure the task is not started before we return
        )
        return NewBackup(slug=slug)

    async def _async_create_backup(
        self,
        backup_name: str,
        date_str: str,
        slug: str,
        on_progress: Callable[[BackupProgress], None] | None,
    ) -> Backup:
        """Generate a backup."""
        success = False
        try:
            self.backing_up = True
            await self.async_pre_backup_actions()
            backup_name = f"Core {HAVERSION}"
            date_str = dt_util.now().isoformat()
            slug = _generate_slug(date_str, backup_name)

            backup_data = {
                "slug": slug,

@@ -370,12 +259,9 @@ class BackupManager(BaseBackupManager):
            if self.loaded_backups:
                self.backups[slug] = backup
            LOGGER.debug("Generated new backup with slug %s", slug)
            success = True
            return backup
        finally:
            if on_progress:
                on_progress(BackupProgress(done=True, stage=None, success=success))
            self.backup_task = None
            self.backing_up = False
            await self.async_post_backup_actions()

    def _mkdir_and_generate_backup_contents(
@@ -8,7 +8,6 @@ from homeassistant.components import websocket_api
from homeassistant.core import HomeAssistant, callback

from .const import DATA_MANAGER, LOGGER
from .manager import BackupProgress


@callback

@@ -41,7 +40,7 @@ async def handle_info(
        msg["id"],
        {
            "backups": list(backups.values()),
            "backing_up": manager.backup_task is not None,
            "backing_up": manager.backing_up,
        },
    )

@@ -114,11 +113,7 @@ async def handle_create(
    msg: dict[str, Any],
) -> None:
    """Generate a backup."""

    def on_progress(progress: BackupProgress) -> None:
        connection.send_message(websocket_api.event_message(msg["id"], progress))

    backup = await hass.data[DATA_MANAGER].async_create_backup(on_progress=on_progress)
    backup = await hass.data[DATA_MANAGER].async_create_backup()
    connection.send_result(msg["id"], backup)


@@ -132,6 +127,7 @@ async def handle_backup_start(
) -> None:
    """Backup start notification."""
    manager = hass.data[DATA_MANAGER]
    manager.backing_up = True
    LOGGER.debug("Backup start notification")

    try:

@@ -153,6 +149,7 @@ async def handle_backup_end(
) -> None:
    """Backup end notification."""
    manager = hass.data[DATA_MANAGER]
    manager.backing_up = False
    LOGGER.debug("Backup end notification")

    try:
@@ -17,9 +17,62 @@ from homeassistant.components.media_player import (
class BangOlufsenSource:
    """Class used for associating device source ids with friendly names. May not include all sources."""

    LINE_IN: Final[Source] = Source(name="Line-In", id="lineIn")
    SPDIF: Final[Source] = Source(name="Optical", id="spdif")
    URI_STREAMER: Final[Source] = Source(name="Audio Streamer", id="uriStreamer")
    URI_STREAMER: Final[Source] = Source(
        name="Audio Streamer",
        id="uriStreamer",
        is_seekable=False,
        is_enabled=True,
        is_playable=True,
    )
    BLUETOOTH: Final[Source] = Source(
        name="Bluetooth",
        id="bluetooth",
        is_seekable=False,
        is_enabled=True,
        is_playable=True,
    )
    CHROMECAST: Final[Source] = Source(
        name="Chromecast built-in",
        id="chromeCast",
        is_seekable=False,
        is_enabled=True,
        is_playable=True,
    )
    LINE_IN: Final[Source] = Source(
        name="Line-In",
        id="lineIn",
        is_seekable=False,
        is_enabled=True,
        is_playable=True,
    )
    SPDIF: Final[Source] = Source(
        name="Optical",
        id="spdif",
        is_seekable=False,
        is_enabled=True,
        is_playable=True,
    )
    NET_RADIO: Final[Source] = Source(
        name="B&O Radio",
        id="netRadio",
        is_seekable=False,
        is_enabled=True,
        is_playable=True,
    )
    DEEZER: Final[Source] = Source(
        name="Deezer",
        id="deezer",
        is_seekable=True,
        is_enabled=True,
        is_playable=True,
    )
    TIDAL: Final[Source] = Source(
        name="Tidal",
        id="tidal",
        is_seekable=True,
        is_enabled=True,
        is_playable=True,
    )


BANG_OLUFSEN_STATES: dict[str, MediaPlayerState] = {
@@ -1,9 +0,0 @@
{
  "services": {
    "beolink_join": { "service": "mdi:location-enter" },
    "beolink_expand": { "service": "mdi:location-enter" },
    "beolink_unexpand": { "service": "mdi:location-exit" },
    "beolink_leave": { "service": "mdi:close-circle-outline" },
    "beolink_allstandby": { "service": "mdi:close-circle-multiple-outline" }
  }
}
@ -11,7 +11,7 @@ from typing import TYPE_CHECKING, Any, cast
|
|||
|
||||
from aiohttp import ClientConnectorError
|
||||
from mozart_api import __version__ as MOZART_API_VERSION
|
||||
from mozart_api.exceptions import ApiException, NotFoundException
|
||||
from mozart_api.exceptions import ApiException
|
||||
from mozart_api.models import (
|
||||
Action,
|
||||
Art,
|
||||
|
@ -38,7 +38,6 @@ from mozart_api.models import (
|
|||
VolumeState,
|
||||
)
|
||||
from mozart_api.mozart_client import MozartClient, get_highest_resolution_artwork
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import media_source
|
||||
from homeassistant.components.media_player import (
|
||||
|
@ -56,17 +55,10 @@ from homeassistant.config_entries import ConfigEntry
|
|||
from homeassistant.const import CONF_MODEL, Platform
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
|
||||
from homeassistant.helpers import (
|
||||
config_validation as cv,
|
||||
device_registry as dr,
|
||||
entity_registry as er,
|
||||
)
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.entity_platform import (
|
||||
AddEntitiesCallback,
|
||||
async_get_current_platform,
|
||||
)
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.util.dt import utcnow
|
||||
|
||||
from . import BangOlufsenConfigEntry
|
||||
|
@ -124,58 +116,6 @@ async def async_setup_entry(
|
|||
]
|
||||
)
|
||||
|
||||
# Register actions.
|
||||
platform = async_get_current_platform()
|
||||
|
||||
jid_regex = vol.Match(
|
||||
r"(^\d{4})[.](\d{7})[.](\d{8})(@products\.bang-olufsen\.com)$"
|
||||
)
|
||||
|
||||
platform.async_register_entity_service(
|
||||
name="beolink_join",
|
||||
schema={vol.Optional("beolink_jid"): jid_regex},
|
||||
func="async_beolink_join",
|
||||
)
|
||||
|
||||
platform.async_register_entity_service(
|
||||
name="beolink_expand",
|
||||
schema={
|
||||
vol.Exclusive("all_discovered", "devices", ""): cv.boolean,
|
||||
vol.Exclusive(
|
||||
"beolink_jids",
|
||||
"devices",
|
||||
"Define either specific Beolink JIDs or all discovered",
|
||||
): vol.All(
|
||||
cv.ensure_list,
|
||||
[jid_regex],
|
||||
),
|
||||
},
|
||||
func="async_beolink_expand",
|
||||
)
|
||||
|
||||
platform.async_register_entity_service(
|
||||
name="beolink_unexpand",
|
||||
schema={
|
||||
vol.Required("beolink_jids"): vol.All(
|
||||
cv.ensure_list,
|
||||
[jid_regex],
|
||||
),
|
||||
},
|
||||
func="async_beolink_unexpand",
|
||||
)
|
||||
|
||||
platform.async_register_entity_service(
|
||||
name="beolink_leave",
|
||||
schema=None,
|
||||
func="async_beolink_leave",
|
||||
)
|
||||
|
||||
platform.async_register_entity_service(
|
||||
name="beolink_allstandby",
|
||||
schema=None,
|
||||
func="async_beolink_allstandby",
|
||||
)
|
||||
|
||||
|
||||
class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
||||
"""Representation of a media player."""
|
||||
|
@ -216,8 +156,6 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
|||
# Beolink compatible sources
|
||||
self._beolink_sources: dict[str, bool] = {}
|
||||
self._remote_leader: BeolinkLeader | None = None
|
||||
# Extra state attributes for showing Beolink: peer(s), listener(s), leader and self
|
||||
self._beolink_attributes: dict[str, dict[str, dict[str, str]]] = {}
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Turn on the dispatchers."""
|
||||
|
@ -227,7 +165,6 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
|||
CONNECTION_STATUS: self._async_update_connection_state,
|
||||
WebsocketNotification.ACTIVE_LISTENING_MODE: self._async_update_sound_modes,
|
||||
WebsocketNotification.BEOLINK: self._async_update_beolink,
|
||||
WebsocketNotification.CONFIGURATION: self._async_update_name_and_beolink,
|
||||
WebsocketNotification.PLAYBACK_ERROR: self._async_update_playback_error,
|
||||
WebsocketNotification.PLAYBACK_METADATA: self._async_update_playback_metadata_and_beolink,
|
||||
WebsocketNotification.PLAYBACK_PROGRESS: self._async_update_playback_progress,
|
||||
|
@ -293,9 +230,6 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
|||
|
||||
await self._async_update_sound_modes()
|
||||
|
||||
# Update beolink attributes and device name.
|
||||
await self._async_update_name_and_beolink()
|
||||
|
||||
async def async_update(self) -> None:
|
||||
"""Update queue settings."""
|
||||
# The WebSocket event listener is the main handler for connection state.
|
||||
|
@ -438,44 +372,9 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
|||
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def _async_update_name_and_beolink(self) -> None:
|
||||
"""Update the device friendly name."""
|
||||
beolink_self = await self._client.get_beolink_self()
|
||||
|
||||
# Update device name
|
||||
device_registry = dr.async_get(self.hass)
|
||||
assert self.device_entry is not None
|
||||
|
||||
device_registry.async_update_device(
|
||||
device_id=self.device_entry.id,
|
||||
name=beolink_self.friendly_name,
|
||||
)
|
||||
|
||||
await self._async_update_beolink()
|
||||
|
||||
async def _async_update_beolink(self) -> None:
|
||||
"""Update the current Beolink leader, listeners, peers and self."""
|
||||
|
||||
self._beolink_attributes = {}
|
||||
|
||||
assert self.device_entry is not None
|
||||
assert self.device_entry.name is not None
|
||||
|
||||
# Add Beolink self
|
||||
self._beolink_attributes = {
|
||||
"beolink": {"self": {self.device_entry.name: self._beolink_jid}}
|
||||
}
|
||||
|
||||
# Add Beolink peers
|
||||
peers = await self._client.get_beolink_peers()
|
||||
|
||||
if len(peers) > 0:
|
||||
self._beolink_attributes["beolink"]["peers"] = {}
|
||||
for peer in peers:
|
||||
self._beolink_attributes["beolink"]["peers"][peer.friendly_name] = (
|
||||
peer.jid
|
||||
)
|
||||
|
||||
# Add Beolink listeners / leader
|
||||
self._remote_leader = self._playback_metadata.remote_leader
|
||||
|
||||
|
@ -495,14 +394,9 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
|||
# Add self
|
||||
group_members.append(self.entity_id)
|
||||
|
||||
self._beolink_attributes["beolink"]["leader"] = {
|
||||
self._remote_leader.friendly_name: self._remote_leader.jid,
|
||||
}
|
||||
|
||||
# If not listener, check if leader.
|
||||
else:
|
||||
beolink_listeners = await self._client.get_beolink_listeners()
|
||||
beolink_listeners_attribute = {}
|
||||
|
||||
# Check if the device is a leader.
|
||||
if len(beolink_listeners) > 0:
|
||||
|
@ -523,18 +417,6 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
|||
for beolink_listener in beolink_listeners
|
||||
]
|
||||
)
|
||||
# Update Beolink attributes
|
||||
for beolink_listener in beolink_listeners:
|
||||
for peer in peers:
|
||||
if peer.jid == beolink_listener.jid:
|
||||
# Get the friendly names for the listeners from the peers
|
||||
beolink_listeners_attribute[peer.friendly_name] = (
|
beolink_listener.jid
)
break
self._beolink_attributes["beolink"]["listeners"] = (
beolink_listeners_attribute
)

self._attr_group_members = group_members

@@ -688,19 +570,38 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
@property
def source(self) -> str | None:
"""Return the current audio source."""

# Try to fix some of the source_change chromecast weirdness.
if hasattr(self._playback_metadata, "title"):
# source_change is chromecast but line in is selected.
if self._playback_metadata.title == BangOlufsenSource.LINE_IN.name:
return BangOlufsenSource.LINE_IN.name

# source_change is chromecast but bluetooth is selected.
if self._playback_metadata.title == BangOlufsenSource.BLUETOOTH.name:
return BangOlufsenSource.BLUETOOTH.name

# source_change is line in, bluetooth or optical but stale metadata is sent through the WebSocket,
# And the source has not changed.
if self._source_change.id in (
BangOlufsenSource.BLUETOOTH.id,
BangOlufsenSource.LINE_IN.id,
BangOlufsenSource.SPDIF.id,
):
return BangOlufsenSource.CHROMECAST.name

# source_change is chromecast and there is metadata but no artwork. Bluetooth does support metadata but not artwork
# So i assume that it is bluetooth and not chromecast
if (
hasattr(self._playback_metadata, "art")
and self._playback_metadata.art is not None
and len(self._playback_metadata.art) == 0
and self._source_change.id == BangOlufsenSource.CHROMECAST.id
):
return BangOlufsenSource.BLUETOOTH.name

return self._source_change.name

@property
def extra_state_attributes(self) -> dict[str, Any] | None:
"""Return information that is not returned anywhere else."""
attributes: dict[str, Any] = {}

# Add Beolink attributes
if self._beolink_attributes:
attributes.update(self._beolink_attributes)

return attributes

async def async_turn_off(self) -> None:
"""Set the device to "networkStandby"."""
await self._client.post_standby()

@@ -972,30 +873,23 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
# Beolink compatible B&O device.
# Repeated presses / calls will cycle between compatible playing devices.
if len(group_members) == 0:
await self.async_beolink_join()
await self._async_beolink_join()
return

# Get JID for each group member
jids = [self._get_beolink_jid(group_member) for group_member in group_members]
await self.async_beolink_expand(jids)
await self._async_beolink_expand(jids)

async def async_unjoin_player(self) -> None:
"""Unjoin Beolink session. End session if leader."""
await self.async_beolink_leave()
await self._async_beolink_leave()

# Custom actions:
async def async_beolink_join(self, beolink_jid: str | None = None) -> None:
async def _async_beolink_join(self) -> None:
"""Join a Beolink multi-room experience."""
if beolink_jid is None:
await self._client.join_latest_beolink_experience()
else:
await self._client.join_beolink_peer(jid=beolink_jid)
await self._client.join_latest_beolink_experience()

async def async_beolink_expand(
self, beolink_jids: list[str] | None = None, all_discovered: bool = False
) -> None:
async def _async_beolink_expand(self, beolink_jids: list[str]) -> None:
"""Expand a Beolink multi-room experience with a device or devices."""

# Ensure that the current source is expandable
if not self._beolink_sources[cast(str, self._source_change.id)]:
raise ServiceValidationError(

@@ -1007,37 +901,10 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
},
)

# Expand to all discovered devices
if all_discovered:
peers = await self._client.get_beolink_peers()

for peer in peers:
try:
await self._client.post_beolink_expand(jid=peer.jid)
except NotFoundException:
_LOGGER.warning("Unable to expand to %s", peer.jid)

# Try to expand to all defined devices
elif beolink_jids:
for beolink_jid in beolink_jids:
try:
await self._client.post_beolink_expand(jid=beolink_jid)
except NotFoundException:
_LOGGER.warning(
"Unable to expand to %s. Is the device available on the network?",
beolink_jid,
)

async def async_beolink_unexpand(self, beolink_jids: list[str]) -> None:
"""Unexpand a Beolink multi-room experience with a device or devices."""
# Unexpand all defined devices
for beolink_jid in beolink_jids:
await self._client.post_beolink_unexpand(jid=beolink_jid)
await self._client.post_beolink_expand(jid=beolink_jid)

async def async_beolink_leave(self) -> None:
async def _async_beolink_leave(self) -> None:
"""Leave the current Beolink experience."""
await self._client.post_beolink_leave()

async def async_beolink_allstandby(self) -> None:
"""Set all connected Beolink devices to standby."""
await self._client.post_beolink_allstandby()
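The hunks above fold the public Beolink helpers into private ones, so speaker grouping is driven through the standard media_player grouping services rather than custom actions. A minimal sketch (not part of this changeset) of exercising that path via the service layer; the entity IDs are made-up examples.

# Sketch, assuming a running Home Assistant instance `hass`; entity IDs are hypothetical.
from homeassistant.core import HomeAssistant


async def group_speakers(hass: HomeAssistant) -> None:
    # media_player.join on the leader ends up in the grouping methods shown above,
    # which forward to the private Beolink helpers.
    await hass.services.async_call(
        "media_player",
        "join",
        {
            "entity_id": "media_player.beosound_balance",       # hypothetical leader
            "group_members": ["media_player.beosound_level"],   # hypothetical member
        },
        blocking=True,
    )

    # media_player.unjoin maps to async_unjoin_player() -> _async_beolink_leave().
    await hass.services.async_call(
        "media_player",
        "unjoin",
        {"entity_id": "media_player.beosound_level"},
        blocking=True,
    )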
@@ -1,79 +0,0 @@
beolink_allstandby:
target:
entity:
integration: bang_olufsen
domain: media_player
device:
integration: bang_olufsen

beolink_expand:
target:
entity:
integration: bang_olufsen
domain: media_player
device:
integration: bang_olufsen
fields:
all_discovered:
required: false
example: false
selector:
boolean:
jid_options:
collapsed: false
fields:
beolink_jids:
required: false
example: >-
[
1111.2222222.33333333@products.bang-olufsen.com,
4444.5555555.66666666@products.bang-olufsen.com
]
selector:
object:

beolink_join:
target:
entity:
integration: bang_olufsen
domain: media_player
device:
integration: bang_olufsen
fields:
jid_options:
collapsed: false
fields:
beolink_jid:
required: false
example: 1111.2222222.33333333@products.bang-olufsen.com
selector:
text:

beolink_leave:
target:
entity:
integration: bang_olufsen
domain: media_player
device:
integration: bang_olufsen

beolink_unexpand:
target:
entity:
integration: bang_olufsen
domain: media_player
device:
integration: bang_olufsen
fields:
jid_options:
collapsed: false
fields:
beolink_jids:
required: true
example: >-
[
1111.2222222.33333333@products.bang-olufsen.com,
4444.5555555.66666666@products.bang-olufsen.com
]
selector:
object:

@@ -1,8 +1,4 @@
{
"common": {
"jid_options_name": "JID options",
"jid_options_description": "Advanced grouping options, where devices' unique Beolink IDs (Called JIDs) are used directly. JIDs can be found in the state attributes of the media player entity."
},
"config": {
"error": {
"api_exception": "[%key:common::config_flow::error::cannot_connect%]",

@@ -29,68 +25,6 @@
}
}
},
"services": {
"beolink_allstandby": {
"name": "Beolink all standby",
"description": "Set all Connected Beolink devices to standby."
},
"beolink_expand": {
"name": "Beolink expand",
"description": "Expand current Beolink experience.",
"fields": {
"all_discovered": {
"name": "All discovered",
"description": "Expand Beolink experience to all discovered devices."
},
"beolink_jids": {
"name": "Beolink JIDs",
"description": "Specify which Beolink JIDs will join current Beolink experience."
}
},
"sections": {
"jid_options": {
"name": "[%key:component::bang_olufsen::common::jid_options_name%]",
"description": "[%key:component::bang_olufsen::common::jid_options_description%]"
}
}
},
"beolink_join": {
"name": "Beolink join",
"description": "Join a Beolink experience.",
"fields": {
"beolink_jid": {
"name": "Beolink JID",
"description": "Manually specify Beolink JID to join."
}
},
"sections": {
"jid_options": {
"name": "[%key:component::bang_olufsen::common::jid_options_name%]",
"description": "[%key:component::bang_olufsen::common::jid_options_description%]"
}
}
},
"beolink_leave": {
"name": "Beolink leave",
"description": "Leave a Beolink experience."
},
"beolink_unexpand": {
"name": "Beolink unexpand",
"description": "Unexpand from current Beolink experience.",
"fields": {
"beolink_jids": {
"name": "Beolink JIDs",
"description": "Specify which Beolink JIDs will leave from current Beolink experience."
}
},
"sections": {
"jid_options": {
"name": "[%key:component::bang_olufsen::common::jid_options_name%]",
"description": "[%key:component::bang_olufsen::common::jid_options_description%]"
}
}
}
},
"exceptions": {
"m3u_invalid_format": {
"message": "Media sources with the .m3u extension are not supported."

@@ -120,11 +120,6 @@ class BangOlufsenWebsocket(BangOlufsenBase):
self.hass,
f"{self._unique_id}_{WebsocketNotification.BEOLINK}",
)
elif notification_type is WebsocketNotification.CONFIGURATION:
async_dispatcher_send(
self.hass,
f"{self._unique_id}_{WebsocketNotification.CONFIGURATION}",
)
elif notification_type is WebsocketNotification.REMOTE_MENU_CHANGED:
async_dispatcher_send(
self.hass,

@@ -10,11 +10,7 @@ from homeassistant.components.sensor import (
SensorEntityDescription,
SensorStateClass,
)
from homeassistant.const import (
SIGNAL_STRENGTH_DECIBELS_MILLIWATT,
EntityCategory,
UnitOfTemperature,
)
from homeassistant.const import EntityCategory, UnitOfTemperature
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddEntitiesCallback

@@ -36,8 +32,6 @@ SENSOR_TYPES: tuple[SensorEntityDescription, ...] = (
SensorEntityDescription(
key=TYPE_WIFI_STRENGTH,
translation_key="wifi_strength",
native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS_MILLIWATT,
device_class=SensorDeviceClass.SIGNAL_STRENGTH,
entity_category=EntityCategory.DIAGNOSTIC,
state_class=SensorStateClass.MEASUREMENT,
),
@@ -770,7 +770,7 @@ class BluesoundPlayer(MediaPlayerEntity):

async def async_set_volume_level(self, volume: float) -> None:
"""Send volume_up command to media player."""
volume = int(round(volume * 100))
volume = int(volume * 100)
volume = min(100, volume)
volume = max(0, volume)
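The hunk above swaps plain truncation for rounding before the clamp. A quick illustration of why, with an arbitrary example value:

# Floating-point maths can land just below the intended integer; int() truncates.
volume = 0.29
print(volume * 100)              # 28.999999999999996
print(int(volume * 100))         # 28  (old behaviour: truncated a step)
print(int(round(volume * 100)))  # 29  (new behaviour: nearest whole percent)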
@@ -7,6 +7,6 @@
"integration_type": "device",
"iot_class": "local_push",
"loggers": ["aiostreammagic"],
"requirements": ["aiostreammagic==2.8.5"],
"requirements": ["aiostreammagic==2.8.4"],
"zeroconf": ["_stream-magic._tcp.local.", "_smoip._tcp.local."]
}

@@ -51,13 +51,8 @@ CONTROL_ENTITIES: tuple[CambridgeAudioSelectEntityDescription, ...] = (
CambridgeAudioSelectEntityDescription(
key="display_brightness",
translation_key="display_brightness",
options=[
DisplayBrightness.BRIGHT.value,
DisplayBrightness.DIM.value,
DisplayBrightness.OFF.value,
],
options=[x.value for x in DisplayBrightness],
entity_category=EntityCategory.CONFIG,
load_fn=lambda client: client.display.brightness != DisplayBrightness.NONE,
value_fn=lambda client: client.display.brightness,
set_value_fn=lambda client, value: client.set_display_brightness(
DisplayBrightness(value)

@@ -421,12 +421,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
if hass.config.webrtc.ice_servers:
return hass.config.webrtc.ice_servers
return [
RTCIceServer(
urls=[
"stun:stun.home-assistant.io:80",
"stun:stun.home-assistant.io:3478",
]
),
RTCIceServer(urls="stun:stun.home-assistant.io:80"),
RTCIceServer(urls="stun:stun.home-assistant.io:3478"),
]

async_register_ice_servers(hass, get_ice_servers)
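The fallback above now returns a single RTCIceServer carrying both STUN URLs instead of two separate entries. A rough sketch, following the same registration pattern, of how an integration might supply its own servers; the TURN URL and credentials are placeholders, and the import locations are assumed from how the module above uses these names.

from homeassistant.core import HomeAssistant, callback
from homeassistant.components.camera import async_register_ice_servers  # assumed import path
from webrtc_models import RTCIceServer


@callback
def register_example_ice_servers(hass: HomeAssistant) -> None:
    def get_ice_servers() -> list[RTCIceServer]:
        # One server object may carry several URLs, as in the fallback above.
        return [
            RTCIceServer(
                urls=["stun:example.invalid:3478", "turn:example.invalid:3478"],
                username="demo",           # placeholder
                credential="demo-secret",  # placeholder
            )
        ]

    # Registration hands back an unregister callback, which a config entry
    # would normally tie to async_on_unload().
    async_register_ice_servers(hass, get_ice_servers)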
@@ -6,7 +6,7 @@ from abc import ABC, abstractmethod
import asyncio
from collections.abc import Awaitable, Callable, Iterable
from dataclasses import asdict, dataclass, field
from functools import cache, partial, wraps
from functools import cache, partial
import logging
from typing import TYPE_CHECKING, Any, Protocol

@@ -205,49 +205,6 @@ async def _async_refresh_providers(hass: HomeAssistant) -> None:
)


type WsCommandWithCamera = Callable[
[websocket_api.ActiveConnection, dict[str, Any], Camera],
Awaitable[None],
]


def require_webrtc_support(
error_code: str,
) -> Callable[[WsCommandWithCamera], websocket_api.AsyncWebSocketCommandHandler]:
"""Validate that the camera supports WebRTC."""

def decorate(
func: WsCommandWithCamera,
) -> websocket_api.AsyncWebSocketCommandHandler:
"""Decorate func."""

@wraps(func)
async def validate(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict[str, Any],
) -> None:
"""Validate that the camera supports WebRTC."""
entity_id = msg["entity_id"]
camera = get_camera_from_entity_id(hass, entity_id)
if camera.frontend_stream_type != StreamType.WEB_RTC:
connection.send_error(
msg["id"],
error_code,
(
"Camera does not support WebRTC,"
f" frontend_stream_type={camera.frontend_stream_type}"
),
)
return

await func(connection, msg, camera)

return validate

return decorate


@websocket_api.websocket_command(
{
vol.Required("type"): "camera/webrtc/offer",
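The require_webrtc_support decorator introduced above centralizes the "camera must support WebRTC" guard that each handler previously repeated. A hypothetical command showing how a new handler would reuse it; the command type and handler name are invented, and the snippet assumes the same module-level imports (websocket_api, vol, cv, Camera, require_webrtc_support) as the code above.

@websocket_api.websocket_command(
    {
        vol.Required("type"): "camera/webrtc/example_info",  # hypothetical command
        vol.Required("entity_id"): cv.entity_id,
    }
)
@websocket_api.async_response
@require_webrtc_support("webrtc_example_failed")  # hypothetical error code
async def ws_example_info(
    connection: websocket_api.ActiveConnection, msg: dict[str, Any], camera: Camera
) -> None:
    # The decorator has already resolved the camera and rejected non-WebRTC
    # streams, so the handler only deals with the happy path.
    connection.send_result(msg["id"], {"supported": True})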
@@ -256,9 +213,8 @@ def require_webrtc_support(
}
)
@websocket_api.async_response
@require_webrtc_support("webrtc_offer_failed")
async def ws_webrtc_offer(
connection: websocket_api.ActiveConnection, msg: dict[str, Any], camera: Camera
hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any]
) -> None:
"""Handle the signal path for a WebRTC stream.

@@ -270,7 +226,20 @@ async def ws_webrtc_offer(

Async friendly.
"""
entity_id = msg["entity_id"]
offer = msg["offer"]
camera = get_camera_from_entity_id(hass, entity_id)
if camera.frontend_stream_type != StreamType.WEB_RTC:
connection.send_error(
msg["id"],
"webrtc_offer_failed",
(
"Camera does not support WebRTC,"
f" frontend_stream_type={camera.frontend_stream_type}"
),
)
return

session_id = ulid()
connection.subscriptions[msg["id"]] = partial(
camera.close_webrtc_session, session_id

@@ -309,11 +278,23 @@
}
)
@websocket_api.async_response
@require_webrtc_support("webrtc_get_client_config_failed")
async def ws_get_client_config(
connection: websocket_api.ActiveConnection, msg: dict[str, Any], camera: Camera
hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any]
) -> None:
"""Handle get WebRTC client config websocket command."""
entity_id = msg["entity_id"]
camera = get_camera_from_entity_id(hass, entity_id)
if camera.frontend_stream_type != StreamType.WEB_RTC:
connection.send_error(
msg["id"],
"webrtc_get_client_config_failed",
(
"Camera does not support WebRTC,"
f" frontend_stream_type={camera.frontend_stream_type}"
),
)
return

config = camera.async_get_webrtc_client_configuration().to_frontend_dict()
connection.send_result(
msg["id"],

@@ -330,11 +311,23 @@ async def ws_get_client_config(
}
)
@websocket_api.async_response
@require_webrtc_support("webrtc_candidate_failed")
async def ws_candidate(
connection: websocket_api.ActiveConnection, msg: dict[str, Any], camera: Camera
hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any]
) -> None:
"""Handle WebRTC candidate websocket command."""
entity_id = msg["entity_id"]
camera = get_camera_from_entity_id(hass, entity_id)
if camera.frontend_stream_type != StreamType.WEB_RTC:
connection.send_error(
msg["id"],
"webrtc_candidate_failed",
(
"Camera does not support WebRTC,"
f" frontend_stream_type={camera.frontend_stream_type}"
),
)
return

await camera.async_on_webrtc_candidate(
msg["session_id"], RTCIceCandidate(msg["candidate"])
)
@@ -440,16 +440,16 @@ def validate_language_voice(value: tuple[str, str]) -> tuple[str, str]:
@websocket_api.websocket_command(
{
vol.Required("type"): "cloud/update_prefs",
vol.Optional(PREF_ALEXA_REPORT_STATE): bool,
vol.Optional(PREF_ENABLE_ALEXA): bool,
vol.Optional(PREF_ENABLE_CLOUD_ICE_SERVERS): bool,
vol.Optional(PREF_ENABLE_GOOGLE): bool,
vol.Optional(PREF_ENABLE_ALEXA): bool,
vol.Optional(PREF_ALEXA_REPORT_STATE): bool,
vol.Optional(PREF_GOOGLE_REPORT_STATE): bool,
vol.Optional(PREF_GOOGLE_SECURE_DEVICES_PIN): vol.Any(None, str),
vol.Optional(PREF_REMOTE_ALLOW_REMOTE_ENABLE): bool,
vol.Optional(PREF_TTS_DEFAULT_VOICE): vol.All(
vol.Coerce(tuple), validate_language_voice
),
vol.Optional(PREF_REMOTE_ALLOW_REMOTE_ENABLE): bool,
vol.Optional(PREF_ENABLE_CLOUD_ICE_SERVERS): bool,
}
)
@websocket_api.async_response

@@ -163,21 +163,21 @@ class CloudPreferences:
async def async_update(
self,
*,
alexa_enabled: bool | UndefinedType = UNDEFINED,
alexa_report_state: bool | UndefinedType = UNDEFINED,
alexa_settings_version: int | UndefinedType = UNDEFINED,
cloud_ice_servers_enabled: bool | UndefinedType = UNDEFINED,
cloud_user: str | UndefinedType = UNDEFINED,
cloudhooks: dict[str, dict[str, str | bool]] | UndefinedType = UNDEFINED,
google_connected: bool | UndefinedType = UNDEFINED,
google_enabled: bool | UndefinedType = UNDEFINED,
google_report_state: bool | UndefinedType = UNDEFINED,
google_secure_devices_pin: str | None | UndefinedType = UNDEFINED,
google_settings_version: int | UndefinedType = UNDEFINED,
remote_allow_remote_enable: bool | UndefinedType = UNDEFINED,
remote_domain: str | None | UndefinedType = UNDEFINED,
alexa_enabled: bool | UndefinedType = UNDEFINED,
remote_enabled: bool | UndefinedType = UNDEFINED,
google_secure_devices_pin: str | None | UndefinedType = UNDEFINED,
cloudhooks: dict[str, dict[str, str | bool]] | UndefinedType = UNDEFINED,
cloud_user: str | UndefinedType = UNDEFINED,
alexa_report_state: bool | UndefinedType = UNDEFINED,
google_report_state: bool | UndefinedType = UNDEFINED,
tts_default_voice: tuple[str, str] | UndefinedType = UNDEFINED,
remote_domain: str | None | UndefinedType = UNDEFINED,
alexa_settings_version: int | UndefinedType = UNDEFINED,
google_settings_version: int | UndefinedType = UNDEFINED,
google_connected: bool | UndefinedType = UNDEFINED,
remote_allow_remote_enable: bool | UndefinedType = UNDEFINED,
cloud_ice_servers_enabled: bool | UndefinedType = UNDEFINED,
) -> None:
"""Update user preferences."""
prefs = {**self._prefs}

@@ -186,21 +186,21 @@ class CloudPreferences:
{
key: value
for key, value in (
(PREF_ALEXA_REPORT_STATE, alexa_report_state),
(PREF_ALEXA_SETTINGS_VERSION, alexa_settings_version),
(PREF_CLOUD_USER, cloud_user),
(PREF_CLOUDHOOKS, cloudhooks),
(PREF_ENABLE_ALEXA, alexa_enabled),
(PREF_ENABLE_CLOUD_ICE_SERVERS, cloud_ice_servers_enabled),
(PREF_ENABLE_GOOGLE, google_enabled),
(PREF_ENABLE_ALEXA, alexa_enabled),
(PREF_ENABLE_REMOTE, remote_enabled),
(PREF_GOOGLE_CONNECTED, google_connected),
(PREF_GOOGLE_REPORT_STATE, google_report_state),
(PREF_GOOGLE_SECURE_DEVICES_PIN, google_secure_devices_pin),
(PREF_CLOUDHOOKS, cloudhooks),
(PREF_CLOUD_USER, cloud_user),
(PREF_ALEXA_REPORT_STATE, alexa_report_state),
(PREF_GOOGLE_REPORT_STATE, google_report_state),
(PREF_ALEXA_SETTINGS_VERSION, alexa_settings_version),
(PREF_GOOGLE_SETTINGS_VERSION, google_settings_version),
(PREF_REMOTE_ALLOW_REMOTE_ENABLE, remote_allow_remote_enable),
(PREF_REMOTE_DOMAIN, remote_domain),
(PREF_TTS_DEFAULT_VOICE, tts_default_voice),
(PREF_REMOTE_DOMAIN, remote_domain),
(PREF_GOOGLE_CONNECTED, google_connected),
(PREF_REMOTE_ALLOW_REMOTE_ENABLE, remote_allow_remote_enable),
(PREF_ENABLE_CLOUD_ICE_SERVERS, cloud_ice_servers_enabled),
)
if value is not UNDEFINED
}

@@ -242,7 +242,6 @@ class CloudPreferences:
PREF_ALEXA_REPORT_STATE: self.alexa_report_state,
PREF_CLOUDHOOKS: self.cloudhooks,
PREF_ENABLE_ALEXA: self.alexa_enabled,
PREF_ENABLE_CLOUD_ICE_SERVERS: self.cloud_ice_servers_enabled,
PREF_ENABLE_GOOGLE: self.google_enabled,
PREF_ENABLE_REMOTE: self.remote_enabled,
PREF_GOOGLE_DEFAULT_EXPOSE: self.google_default_expose,

@@ -250,6 +249,7 @@
PREF_GOOGLE_SECURE_DEVICES_PIN: self.google_secure_devices_pin,
PREF_REMOTE_ALLOW_REMOTE_ENABLE: self.remote_allow_remote_enable,
PREF_TTS_DEFAULT_VOICE: self.tts_default_voice,
PREF_ENABLE_CLOUD_ICE_SERVERS: self.cloud_ice_servers_enabled,
}

@property
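The schema and the async_update signature above are only being re-ordered alphabetically; the call style stays keyword-only, with anything not passed left untouched. A minimal sketch, assuming prefs is the CloudPreferences instance shown above:

async def update_example_prefs(prefs: CloudPreferences) -> None:
    # Only the keywords passed here are persisted; omitted ones stay UNDEFINED.
    await prefs.async_update(
        alexa_enabled=True,
        google_report_state=False,
        cloud_ice_servers_enabled=True,
    )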
@@ -168,7 +168,7 @@ class ElectricityMapsConfigFlow(ConfigFlow, domain=DOMAIN):
)

return self.async_create_entry(
title=get_extra_name(data) or "Electricity Maps",
title=get_extra_name(data) or "CO2 Signal",
data=data,
)

@@ -4,5 +4,5 @@
"codeowners": ["@Petro31"],
"documentation": "https://www.home-assistant.io/integrations/compensation",
"iot_class": "calculated",
"requirements": ["numpy==2.1.3"]
"requirements": ["numpy==2.1.2"]
}
@@ -46,6 +46,13 @@ def async_setup(hass: HomeAssistant) -> bool:
hass.http.register_view(OptionManagerFlowIndexView(hass.config_entries.options))
hass.http.register_view(OptionManagerFlowResourceView(hass.config_entries.options))

hass.http.register_view(
SubentryManagerFlowIndexView(hass.config_entries.subentries)
)
hass.http.register_view(
SubentryManagerFlowResourceView(hass.config_entries.subentries)
)

websocket_api.async_register_command(hass, config_entries_get)
websocket_api.async_register_command(hass, config_entry_disable)
websocket_api.async_register_command(hass, config_entry_get_single)

@@ -54,6 +61,9 @@ def async_setup(hass: HomeAssistant) -> bool:
websocket_api.async_register_command(hass, config_entries_progress)
websocket_api.async_register_command(hass, ignore_config_flow)

websocket_api.async_register_command(hass, config_subentry_delete)
websocket_api.async_register_command(hass, config_subentry_list)

return True


@@ -285,6 +295,48 @@ class OptionManagerFlowResourceView(
return await super().post(request, flow_id)


class SubentryManagerFlowIndexView(
FlowManagerIndexView[config_entries.ConfigSubentryFlowManager]
):
"""View to create subentry flows."""

url = "/api/config/config_entries/subentries/flow"
name = "api:config:config_entries:subentries:flow"

@require_admin(
error=Unauthorized(perm_category=CAT_CONFIG_ENTRIES, permission=POLICY_EDIT)
)
async def post(self, request: web.Request) -> web.Response:
"""Handle a POST request.

handler in request is entry_id.
"""
return await super().post(request)


class SubentryManagerFlowResourceView(
FlowManagerResourceView[config_entries.ConfigSubentryFlowManager]
):
"""View to interact with the subentry flow manager."""

url = "/api/config/config_entries/subentries/flow/{flow_id}"
name = "api:config:config_entries:subentries:flow:resource"

@require_admin(
error=Unauthorized(perm_category=CAT_CONFIG_ENTRIES, permission=POLICY_EDIT)
)
async def get(self, request: web.Request, /, flow_id: str) -> web.Response:
"""Get the current state of a data_entry_flow."""
return await super().get(request, flow_id)

@require_admin(
error=Unauthorized(perm_category=CAT_CONFIG_ENTRIES, permission=POLICY_EDIT)
)
async def post(self, request: web.Request, flow_id: str) -> web.Response:
"""Handle a POST request."""
return await super().post(request, flow_id)


@websocket_api.require_admin
@websocket_api.websocket_command({"type": "config_entries/flow/progress"})
def config_entries_progress(

@@ -588,3 +640,62 @@ async def _async_matching_config_entries_json_fragments(
)
or (filter_is_not_helper and entry.domain not in integrations)
]


@websocket_api.require_admin
@websocket_api.websocket_command(
{
"type": "config_entries/subentries/list",
"entry_id": str,
}
)
@websocket_api.async_response
async def config_subentry_list(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict[str, Any],
) -> None:
"""List subentries of a config entry."""
entry = get_entry(hass, connection, msg["entry_id"], msg["id"])
if entry is None:
return

result = [
{
"subentry_id": subentry.subentry_id,
"title": subentry.title,
"unique_id": subentry.unique_id,
}
for subentry_id, subentry in entry.subentries.items()
]
connection.send_result(msg["id"], result)


@websocket_api.require_admin
@websocket_api.websocket_command(
{
"type": "config_entries/subentries/delete",
"entry_id": str,
"subentry_id": str,
}
)
@websocket_api.async_response
async def config_subentry_delete(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict[str, Any],
) -> None:
"""Delete a subentry of a config entry."""
entry = get_entry(hass, connection, msg["entry_id"], msg["id"])
if entry is None:
return

try:
hass.config_entries.async_remove_subentry(entry, msg["subentry_id"])
except config_entries.UnknownSubEntry:
connection.send_error(
msg["id"], websocket_api.const.ERR_NOT_FOUND, "Config subentry not found"
)
return

connection.send_result(msg["id"])
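The two subentry commands registered above follow the usual admin WebSocket shape. Hypothetical client messages (the IDs are placeholders) and the result shape implied by config_subentry_list:

# Messages a WebSocket client could send to the commands registered above.
list_subentries = {
    "id": 42,
    "type": "config_entries/subentries/list",
    "entry_id": "1234abcd",  # placeholder config entry id
}

delete_subentry = {
    "id": 43,
    "type": "config_entries/subentries/delete",
    "entry_id": "1234abcd",      # placeholder
    "subentry_id": "5678efgh",   # placeholder
}
# A successful list call answers with items shaped like
# {"subentry_id": ..., "title": ..., "unique_id": ...}, per config_subentry_list above.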
@@ -16,11 +16,11 @@ from hassil.expression import Expression, ListReference, Sequence
from hassil.intents import Intents, SlotList, TextSlotList, WildcardSlotList
from hassil.recognize import (
MISSING_ENTITY,
MatchEntity,
RecognizeResult,
UnmatchedTextEntity,
recognize_all,
recognize_best,
)
from hassil.string_matcher import UnmatchedRangeEntity, UnmatchedTextEntity
from hassil.util import merge_dict
from home_assistant_intents import ErrorKey, get_intents, get_languages
import yaml

@@ -294,7 +294,7 @@ class DefaultAgent(ConversationEntity):
self.hass, language, DOMAIN, [DOMAIN]
)
response_text = translations.get(
f"component.{DOMAIN}.conversation.agent.done", "Done"
f"component.{DOMAIN}.agent.done", "Done"
)

response.async_set_speech(response_text)

@@ -499,7 +499,6 @@ class DefaultAgent(ConversationEntity):
maybe_result: RecognizeResult | None = None
best_num_matched_entities = 0
best_num_unmatched_entities = 0
best_num_unmatched_ranges = 0
for result in recognize_all(
user_input.text,
lang_intents.intents,

@@ -518,14 +517,10 @@ class DefaultAgent(ConversationEntity):
num_matched_entities += 1

num_unmatched_entities = 0
num_unmatched_ranges = 0
for unmatched_entity in result.unmatched_entities_list:
if isinstance(unmatched_entity, UnmatchedTextEntity):
if unmatched_entity.text != MISSING_ENTITY:
num_unmatched_entities += 1
elif isinstance(unmatched_entity, UnmatchedRangeEntity):
num_unmatched_ranges += 1
num_unmatched_entities += 1
else:
num_unmatched_entities += 1

@@ -537,24 +532,15 @@ class DefaultAgent(ConversationEntity):
(num_matched_entities == best_num_matched_entities)
and (num_unmatched_entities < best_num_unmatched_entities)
)
or (
# Prefer unmatched ranges
(num_matched_entities == best_num_matched_entities)
and (num_unmatched_entities == best_num_unmatched_entities)
and (num_unmatched_ranges > best_num_unmatched_ranges)
)
or (
# More literal text matched
(num_matched_entities == best_num_matched_entities)
and (num_unmatched_entities == best_num_unmatched_entities)
and (num_unmatched_ranges == best_num_unmatched_ranges)
and (result.text_chunks_matched > maybe_result.text_chunks_matched)
)
or (
# Prefer match failures with entities
(result.text_chunks_matched == maybe_result.text_chunks_matched)
and (num_unmatched_entities == best_num_unmatched_entities)
and (num_unmatched_ranges == best_num_unmatched_ranges)
and (
("name" in result.entities)
or ("name" in result.unmatched_entities)

@@ -564,7 +550,6 @@ class DefaultAgent(ConversationEntity):
maybe_result = result
best_num_matched_entities = num_matched_entities
best_num_unmatched_entities = num_unmatched_entities
best_num_unmatched_ranges = num_unmatched_ranges

return maybe_result

@@ -577,15 +562,76 @@ class DefaultAgent(ConversationEntity):
language: str,
) -> RecognizeResult | None:
"""Search intents for a strict match to user input."""
return recognize_best(
custom_found = False
name_found = False
best_results: list[RecognizeResult] = []
best_name_quality: int | None = None
best_text_chunks_matched: int | None = None
for result in recognize_all(
user_input.text,
lang_intents.intents,
slot_lists=slot_lists,
intent_context=intent_context,
language=language,
best_metadata_key=METADATA_CUSTOM_SENTENCE,
best_slot_name="name",
)
):
# Prioritize user intents
is_custom = (
result.intent_metadata is not None
and result.intent_metadata.get(METADATA_CUSTOM_SENTENCE)
)

if custom_found and not is_custom:
continue

if not custom_found and is_custom:
custom_found = True
# Clear builtin results
name_found = False
best_results = []
best_name_quality = None
best_text_chunks_matched = None

# Prioritize results with a "name" slot
name = result.entities.get("name")
is_name = name and not name.is_wildcard

if name_found and not is_name:
continue

if not name_found and is_name:
name_found = True
# Clear non-name results
best_results = []
best_text_chunks_matched = None

if is_name:
# Prioritize results with a better "name" slot
name_quality = len(cast(MatchEntity, name).value.split())
if (best_name_quality is None) or (name_quality > best_name_quality):
best_name_quality = name_quality
# Clear worse name results
best_results = []
best_text_chunks_matched = None
elif name_quality < best_name_quality:
continue

# Prioritize results with more literal text
# This causes wildcards to match last.
if (best_text_chunks_matched is None) or (
result.text_chunks_matched > best_text_chunks_matched
):
best_results = [result]
best_text_chunks_matched = result.text_chunks_matched
elif result.text_chunks_matched == best_text_chunks_matched:
# Accumulate results with the same number of literal text matched.
# We will resolve the ambiguity below.
best_results.append(result)

if best_results:
# Successful strict match
return best_results[0]

return None

async def _build_speech(
self,
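The new side of the hunk above delegates ranking to hassil's recognize_best, passing best_metadata_key and best_slot_name so custom sentences win first, then matches that fill a "name" slot, then matches with more literal text. A short sketch of the same call in isolation; the Intents object and slot lists are assumed to come from the loaded language data, as in the agent above.

from hassil.intents import Intents
from hassil.recognize import RecognizeResult, recognize_best


def strict_match(
    text: str, intents: Intents, slot_lists: dict, metadata_key: str
) -> RecognizeResult | None:
    # Mirrors the call the rewritten strict-match path relies on: the helper
    # applies the custom-sentence / name-slot / literal-text preference itself.
    return recognize_best(
        text,
        intents,
        slot_lists=slot_lists,
        language="en",
        best_metadata_key=metadata_key,  # e.g. METADATA_CUSTOM_SENTENCE above
        best_slot_name="name",
    )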
@@ -6,8 +6,12 @@ from collections.abc import Iterable
from typing import Any

from aiohttp import web
from hassil.recognize import MISSING_ENTITY, RecognizeResult
from hassil.string_matcher import UnmatchedRangeEntity, UnmatchedTextEntity
from hassil.recognize import (
MISSING_ENTITY,
RecognizeResult,
UnmatchedRangeEntity,
UnmatchedTextEntity,
)
import voluptuous as vol

from homeassistant.components import http, websocket_api

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/conversation",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["hassil==2.0.1", "home-assistant-intents==2024.11.13"]
"requirements": ["hassil==1.7.4", "home-assistant-intents==2024.11.6"]
}

@@ -4,8 +4,7 @@ from __future__ import annotations

from typing import Any

from hassil.recognize import RecognizeResult
from hassil.util import PUNCTUATION_ALL
from hassil.recognize import PUNCTUATION, RecognizeResult
import voluptuous as vol

from homeassistant.const import CONF_COMMAND, CONF_PLATFORM

@@ -21,7 +20,7 @@ from .const import DATA_DEFAULT_ENTITY, DOMAIN
def has_no_punctuation(value: list[str]) -> list[str]:
"""Validate result does not contain punctuation."""
for sentence in value:
if PUNCTUATION_ALL.search(sentence):
if PUNCTUATION.search(sentence):
raise vol.Invalid("sentence should not contain punctuation")

return value

@@ -5,5 +5,5 @@
"documentation": "https://www.home-assistant.io/integrations/doods",
"iot_class": "local_polling",
"loggers": ["pydoods"],
"requirements": ["pydoods==1.0.2", "Pillow==11.0.0"]
"requirements": ["pydoods==1.0.2", "Pillow==10.4.0"]
}
@@ -6,14 +6,9 @@ from collections.abc import Awaitable, Callable
from dataclasses import dataclass
import logging

from homeassistant.components.number import (
NumberDeviceClass,
NumberEntity,
NumberEntityDescription,
NumberMode,
)
from homeassistant.components.number import NumberEntity, NumberEntityDescription
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import UnitOfTemperature, UnitOfTime
from homeassistant.const import UnitOfTime
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback

@@ -59,30 +54,21 @@ async def async_setup_entry(
) -> None:
"""Set up the ecobee thermostat number entity."""
data: EcobeeData = hass.data[DOMAIN]
_LOGGER.debug("Adding min time ventilators numbers (if present)")

assert data is not None

entities: list[NumberEntity] = [
EcobeeVentilatorMinTime(data, index, numbers)
for index, thermostat in enumerate(data.ecobee.thermostats)
if thermostat["settings"]["ventilatorType"] != "none"
for numbers in VENTILATOR_NUMBERS
]

_LOGGER.debug("Adding compressor min temp number (if present)")
entities.extend(
async_add_entities(
(
EcobeeCompressorMinTemp(data, index)
EcobeeVentilatorMinTime(data, index, numbers)
for index, thermostat in enumerate(data.ecobee.thermostats)
if thermostat["settings"]["hasHeatPump"]
)
if thermostat["settings"]["ventilatorType"] != "none"
for numbers in VENTILATOR_NUMBERS
),
True,
)

async_add_entities(entities, True)


class EcobeeVentilatorMinTime(EcobeeBaseEntity, NumberEntity):
"""A number class, representing min time for an ecobee thermostat with ventilator attached."""
"""A number class, representing min time for an ecobee thermostat with ventilator attached."""

entity_description: EcobeeNumberEntityDescription

@@ -119,53 +105,3 @@ class EcobeeVentilatorMinTime(EcobeeBaseEntity, NumberEntity):
"""Set new ventilator Min On Time value."""
self.entity_description.set_fn(self.data, self.thermostat_index, int(value))
self.update_without_throttle = True


class EcobeeCompressorMinTemp(EcobeeBaseEntity, NumberEntity):
"""Minimum outdoor temperature at which the compressor will operate.

This applies more to air source heat pumps than geothermal. This serves as a safety
feature (compressors have a minimum operating temperature) as well as
providing the ability to choose fuel in a dual-fuel system (i.e. choose between
electrical heat pump and fossil auxiliary heat depending on Time of Use, Solar,
etc.).
Note that python-ecobee-api refers to this as Aux Cutover Threshold, but Ecobee
uses Compressor Protection Min Temp.
"""

_attr_device_class = NumberDeviceClass.TEMPERATURE
_attr_has_entity_name = True
_attr_icon = "mdi:thermometer-off"
_attr_mode = NumberMode.BOX
_attr_native_min_value = -25
_attr_native_max_value = 66
_attr_native_step = 5
_attr_native_unit_of_measurement = UnitOfTemperature.FAHRENHEIT
_attr_translation_key = "compressor_protection_min_temp"

def __init__(
self,
data: EcobeeData,
thermostat_index: int,
) -> None:
"""Initialize ecobee compressor min temperature."""
super().__init__(data, thermostat_index)
self._attr_unique_id = f"{self.base_unique_id}_compressor_protection_min_temp"
self.update_without_throttle = False

async def async_update(self) -> None:
"""Get the latest state from the thermostat."""
if self.update_without_throttle:
await self.data.update(no_throttle=True)
self.update_without_throttle = False
else:
await self.data.update()

self._attr_native_value = (
(self.thermostat["settings"]["compressorProtectionMinTemp"]) / 10
)

def set_native_value(self, value: float) -> None:
"""Set new compressor minimum temperature."""
self.data.ecobee.set_aux_cutover_threshold(self.thermostat_index, value)
self.update_without_throttle = True
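EcobeeCompressorMinTemp above reads compressorProtectionMinTemp in tenths of a degree Fahrenheit and divides by 10 for display, while set_native_value hands the plain degree value to set_aux_cutover_threshold. A small illustration of that conversion with made-up numbers:

# The thermostat settings expose the threshold in tenths of a degree Fahrenheit.
raw_setting = 150                 # hypothetical value from thermostat["settings"]
display_value = raw_setting / 10  # 15.0 degrees F shown as the number entity state
print(display_value)

# Writing back goes the other way round through the API wrapper, in plain degrees F:
new_value = 10.0
print(f"set_aux_cutover_threshold(thermostat_index, {new_value})")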
@@ -33,18 +33,15 @@
},
"number": {
"ventilator_min_type_home": {
"name": "Ventilator minimum time home"
"name": "Ventilator min time home"
},
"ventilator_min_type_away": {
"name": "Ventilator minimum time away"
},
"compressor_protection_min_temp": {
"name": "Compressor minimum temperature"
"name": "Ventilator min time away"
}
},
"switch": {
"aux_heat_only": {
"name": "Auxiliary heat only"
"name": "Aux heat only"
}
}
},

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/ecovacs",
"iot_class": "cloud_push",
"loggers": ["sleekxmppfs", "sucks", "deebot_client"],
"requirements": ["py-sucks==0.9.10", "deebot-client==8.4.1"]
"requirements": ["py-sucks==0.9.10", "deebot-client==8.4.0"]
}

@@ -5,11 +5,8 @@ from pyemoncms import EmoncmsClient
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_API_KEY, CONF_URL, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue

from .const import DOMAIN, EMONCMS_UUID_DOC_URL, LOGGER
from .coordinator import EmoncmsCoordinator

PLATFORMS: list[Platform] = [Platform.SENSOR]

@@ -17,49 +14,6 @@ PLATFORMS: list[Platform] = [Platform.SENSOR]
type EmonCMSConfigEntry = ConfigEntry[EmoncmsCoordinator]


def _migrate_unique_id(
hass: HomeAssistant, entry: EmonCMSConfigEntry, emoncms_unique_id: str
) -> None:
"""Migrate to emoncms unique id if needed."""
ent_reg = er.async_get(hass)
entry_entities = ent_reg.entities.get_entries_for_config_entry_id(entry.entry_id)
for entity in entry_entities:
if entity.unique_id.split("-")[0] == entry.entry_id:
feed_id = entity.unique_id.split("-")[-1]
LOGGER.debug(f"moving feed {feed_id} to hardware uuid")
ent_reg.async_update_entity(
entity.entity_id, new_unique_id=f"{emoncms_unique_id}-{feed_id}"
)
hass.config_entries.async_update_entry(
entry,
unique_id=emoncms_unique_id,
)


async def _check_unique_id_migration(
hass: HomeAssistant, entry: EmonCMSConfigEntry, emoncms_client: EmoncmsClient
) -> None:
"""Check if we can migrate to the emoncms uuid."""
emoncms_unique_id = await emoncms_client.async_get_uuid()
if emoncms_unique_id:
if entry.unique_id != emoncms_unique_id:
_migrate_unique_id(hass, entry, emoncms_unique_id)
else:
async_create_issue(
hass,
DOMAIN,
"migrate database",
is_fixable=False,
issue_domain=DOMAIN,
severity=IssueSeverity.WARNING,
translation_key="migrate_database",
translation_placeholders={
"url": entry.data[CONF_URL],
"doc_url": EMONCMS_UUID_DOC_URL,
},
)


async def async_setup_entry(hass: HomeAssistant, entry: EmonCMSConfigEntry) -> bool:
"""Load a config entry."""
emoncms_client = EmoncmsClient(

@@ -67,7 +21,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: EmonCMSConfigEntry) -> b
entry.data[CONF_API_KEY],
session=async_get_clientsession(hass),
)
await _check_unique_id_migration(hass, entry, emoncms_client)
coordinator = EmoncmsCoordinator(hass, emoncms_client)
await coordinator.async_config_entry_first_refresh()
entry.runtime_data = coordinator
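The migration helpers above rewrite sensor unique IDs from the config-entry prefix to the emoncms hardware UUID returned by async_get_uuid(). The before/after shape, with placeholder IDs:

# Shape of the unique_id migration performed above; all IDs are placeholders.
entry_id = "0123456789abcdef"            # ConfigEntry.entry_id (old prefix)
emoncms_uuid = "a1b2c3d4-example-uuid"   # value from EmoncmsClient.async_get_uuid()
feed_id = "42"

old_unique_id = f"{entry_id}-{feed_id}"      # what EmonCmsSensor used before
new_unique_id = f"{emoncms_uuid}-{feed_id}"  # what _migrate_unique_id rewrites it to
print(old_unique_id, "->", new_unique_id)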
@@ -14,7 +14,7 @@ from homeassistant.config_entries import (
OptionsFlow,
)
from homeassistant.const import CONF_API_KEY, CONF_URL
from homeassistant.core import callback
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.selector import selector
from homeassistant.helpers.typing import ConfigType

@@ -48,10 +48,13 @@ def sensor_name(url: str) -> str:
return f"emoncms@{sensorip}"


async def get_feed_list(
emoncms_client: EmoncmsClient,
) -> dict[str, Any]:
async def get_feed_list(hass: HomeAssistant, url: str, api_key: str) -> dict[str, Any]:
"""Check connection to emoncms and return feed list if successful."""
emoncms_client = EmoncmsClient(
url,
api_key,
session=async_get_clientsession(hass),
)
return await emoncms_client.async_request("/feed/list.json")


@@ -79,25 +82,22 @@ class EmoncmsConfigFlow(ConfigFlow, domain=DOMAIN):
description_placeholders = {}

if user_input is not None:
self.url = user_input[CONF_URL]
self.api_key = user_input[CONF_API_KEY]
self._async_abort_entries_match(
{
CONF_API_KEY: self.api_key,
CONF_URL: self.url,
CONF_API_KEY: user_input[CONF_API_KEY],
CONF_URL: user_input[CONF_URL],
}
)
emoncms_client = EmoncmsClient(
self.url, self.api_key, session=async_get_clientsession(self.hass)
result = await get_feed_list(
self.hass, user_input[CONF_URL], user_input[CONF_API_KEY]
)
result = await get_feed_list(emoncms_client)
if not result[CONF_SUCCESS]:
errors["base"] = "api_error"
description_placeholders = {"details": result[CONF_MESSAGE]}
else:
self.include_only_feeds = user_input.get(CONF_ONLY_INCLUDE_FEEDID)
await self.async_set_unique_id(await emoncms_client.async_get_uuid())
self._abort_if_unique_id_configured()
self.url = user_input[CONF_URL]
self.api_key = user_input[CONF_API_KEY]
options = get_options(result[CONF_MESSAGE])
self.dropdown = {
"options": options,

@@ -191,12 +191,7 @@ class EmoncmsOptionsFlow(OptionsFlow):
self.config_entry.data.get(CONF_ONLY_INCLUDE_FEEDID, []),
)
options: list = include_only_feeds
emoncms_client = EmoncmsClient(
self._url,
self._api_key,
session=async_get_clientsession(self.hass),
)
result = await get_feed_list(emoncms_client)
result = await get_feed_list(self.hass, self._url, self._api_key)
if not result[CONF_SUCCESS]:
errors["base"] = "api_error"
description_placeholders = {"details": result[CONF_MESSAGE]}

@@ -7,10 +7,6 @@ CONF_ONLY_INCLUDE_FEEDID = "include_only_feed_id"
CONF_MESSAGE = "message"
CONF_SUCCESS = "success"
DOMAIN = "emoncms"
EMONCMS_UUID_DOC_URL = (
"https://docs.openenergymonitor.org/emoncms/update.html"
"#upgrading-to-a-version-producing-a-unique-identifier"
)
FEED_ID = "id"
FEED_NAME = "name"
FEED_TAG = "tag"

@@ -148,20 +148,20 @@ async def async_setup_entry(
return

coordinator = entry.runtime_data
# uuid was added in emoncms database 11.5.7
unique_id = entry.unique_id if entry.unique_id else entry.entry_id
elems = coordinator.data
if not elems:
return

sensors: list[EmonCmsSensor] = []

for idx, elem in enumerate(elems):
if include_only_feeds is not None and elem[FEED_ID] not in include_only_feeds:
continue

sensors.append(
EmonCmsSensor(
coordinator,
unique_id,
entry.entry_id,
elem["unit"],
name,
idx,

@@ -176,7 +176,7 @@ class EmonCmsSensor(CoordinatorEntity[EmoncmsCoordinator], SensorEntity):
def __init__(
self,
coordinator: EmoncmsCoordinator,
unique_id: str,
entry_id: str,
unit_of_measurement: str | None,
name: str,
idx: int,

@@ -189,7 +189,7 @@ class EmonCmsSensor(CoordinatorEntity[EmoncmsCoordinator], SensorEntity):
elem = self.coordinator.data[self.idx]
self._attr_name = f"{name} {elem[FEED_NAME]}"
self._attr_native_unit_of_measurement = unit_of_measurement
self._attr_unique_id = f"{unique_id}-{elem[FEED_ID]}"
self._attr_unique_id = f"{entry_id}-{elem[FEED_ID]}"
if unit_of_measurement in ("kWh", "Wh"):
self._attr_device_class = SensorDeviceClass.ENERGY
self._attr_state_class = SensorStateClass.TOTAL_INCREASING

@@ -19,9 +19,6 @@
"include_only_feed_id": "Choose feeds to include"
}
}
},
"abort": {
"already_configured": "This server is already configured"
}
},
"options": {

@@ -44,10 +41,6 @@
"missing_include_only_feed_id": {
"title": "No feed synchronized with the {domain} sensor",
"description": "Configuring {domain} using YAML is being removed.\n\nPlease add manually the feeds you want to synchronize with the `configure` button of the integration."
},
"migrate_database": {
"title": "Upgrade your emoncms version",
"description": "Your [emoncms]({url}) does not ship a unique identifier.\n\n Please upgrade to at least version 11.5.7 and migrate your emoncms database.\n\n More info on [emoncms documentation]({doc_url})"
}
}
}
@@ -15,23 +15,17 @@ from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.dispatcher import async_dispatcher_send

from .const import SIGNAL_THERMOSTAT_CONNECTED, SIGNAL_THERMOSTAT_DISCONNECTED
from .const import DOMAIN, SIGNAL_THERMOSTAT_CONNECTED, SIGNAL_THERMOSTAT_DISCONNECTED
from .models import Eq3Config, Eq3ConfigEntryData

PLATFORMS = [
Platform.BINARY_SENSOR,
Platform.CLIMATE,
Platform.NUMBER,
Platform.SWITCH,
]

_LOGGER = logging.getLogger(__name__)


type Eq3ConfigEntry = ConfigEntry[Eq3ConfigEntryData]


async def async_setup_entry(hass: HomeAssistant, entry: Eq3ConfigEntry) -> bool:
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Handle config entry setup."""

mac_address: str | None = entry.unique_id

@@ -59,11 +53,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: Eq3ConfigEntry) -> bool:
ble_device=device,
)

entry.runtime_data = Eq3ConfigEntryData(
eq3_config=eq3_config, thermostat=thermostat
)
eq3_config_entry = Eq3ConfigEntryData(eq3_config=eq3_config, thermostat=thermostat)
hass.data.setdefault(DOMAIN, {})[entry.entry_id] = eq3_config_entry

entry.async_on_unload(entry.add_update_listener(update_listener))
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

entry.async_create_background_task(
hass, _async_run_thermostat(hass, entry), entry.entry_id
)

@@ -71,27 +66,29 @@ async def async_setup_entry(hass: HomeAssistant, entry: Eq3ConfigEntry) -> bool:
return True


async def async_unload_entry(hass: HomeAssistant, entry: Eq3ConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Handle config entry unload."""

if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
await entry.runtime_data.thermostat.async_disconnect()
eq3_config_entry: Eq3ConfigEntryData = hass.data[DOMAIN].pop(entry.entry_id)
await eq3_config_entry.thermostat.async_disconnect()

return unload_ok


async def update_listener(hass: HomeAssistant, entry: Eq3ConfigEntry) -> None:
async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Handle config entry update."""

await hass.config_entries.async_reload(entry.entry_id)


async def _async_run_thermostat(hass: HomeAssistant, entry: Eq3ConfigEntry) -> None:
async def _async_run_thermostat(hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Run the thermostat."""

thermostat = entry.runtime_data.thermostat
mac_address = entry.runtime_data.eq3_config.mac_address
scan_interval = entry.runtime_data.eq3_config.scan_interval
eq3_config_entry: Eq3ConfigEntryData = hass.data[DOMAIN][entry.entry_id]
thermostat = eq3_config_entry.thermostat
mac_address = eq3_config_entry.eq3_config.mac_address
scan_interval = eq3_config_entry.eq3_config.scan_interval

await _async_reconnect_thermostat(hass, entry)

@@ -120,14 +117,13 @@ async def _async_run_thermostat(hass: HomeAssistant, entry: Eq3ConfigEntry) -> N
await asyncio.sleep(scan_interval)


async def _async_reconnect_thermostat(
hass: HomeAssistant, entry: Eq3ConfigEntry
) -> None:
async def _async_reconnect_thermostat(hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Reconnect the thermostat."""

thermostat = entry.runtime_data.thermostat
mac_address = entry.runtime_data.eq3_config.mac_address
scan_interval = entry.runtime_data.eq3_config.scan_interval
eq3_config_entry: Eq3ConfigEntryData = hass.data[DOMAIN][entry.entry_id]
thermostat = eq3_config_entry.thermostat
mac_address = eq3_config_entry.eq3_config.mac_address
scan_interval = eq3_config_entry.eq3_config.scan_interval

while True:
try:
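The hunks above (and the climate and binary sensor changes that follow) move per-entry state from hass.data[DOMAIN] onto entry.runtime_data behind a typed alias. A minimal, self-contained sketch of that pattern; the RuntimeData class here is a stand-in for illustration, not the integration's real model.

from dataclasses import dataclass

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant


@dataclass
class RuntimeData:
    greeting: str


# Typed alias so every platform gets the same runtime_data type (Python 3.12 syntax).
type ExampleConfigEntry = ConfigEntry[RuntimeData]


async def async_setup_entry(hass: HomeAssistant, entry: ExampleConfigEntry) -> bool:
    # Attach per-entry state to the entry itself instead of hass.data[DOMAIN].
    entry.runtime_data = RuntimeData(greeting="hello")
    return True


async def async_unload_entry(hass: HomeAssistant, entry: ExampleConfigEntry) -> bool:
    # Anything holding the entry reads the typed object back directly,
    # with no dict lookup or cast.
    assert entry.runtime_data.greeting == "hello"
    return True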
@ -1,86 +0,0 @@
|
|||
"""Platform for eq3 binary sensor entities."""
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from eq3btsmart.models import Status
|
||||
|
||||
from homeassistant.components.binary_sensor import (
|
||||
BinarySensorDeviceClass,
|
||||
BinarySensorEntity,
|
||||
BinarySensorEntityDescription,
|
||||
)
|
||||
from homeassistant.const import EntityCategory
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
|
||||
from . import Eq3ConfigEntry
|
||||
from .const import ENTITY_KEY_BATTERY, ENTITY_KEY_DST, ENTITY_KEY_WINDOW
|
||||
from .entity import Eq3Entity
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class Eq3BinarySensorEntityDescription(BinarySensorEntityDescription):
|
||||
"""Entity description for eq3 binary sensors."""
|
||||
|
||||
value_func: Callable[[Status], bool]
|
||||
|
||||
|
||||
BINARY_SENSOR_ENTITY_DESCRIPTIONS = [
|
||||
Eq3BinarySensorEntityDescription(
|
||||
value_func=lambda status: status.is_low_battery,
|
||||
key=ENTITY_KEY_BATTERY,
|
||||
device_class=BinarySensorDeviceClass.BATTERY,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
Eq3BinarySensorEntityDescription(
|
||||
value_func=lambda status: status.is_window_open,
|
||||
key=ENTITY_KEY_WINDOW,
|
||||
device_class=BinarySensorDeviceClass.WINDOW,
|
||||
),
|
||||
Eq3BinarySensorEntityDescription(
|
||||
value_func=lambda status: status.is_dst,
|
||||
key=ENTITY_KEY_DST,
|
||||
translation_key=ENTITY_KEY_DST,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: Eq3ConfigEntry,
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the entry."""
|
||||
|
||||
async_add_entities(
|
||||
Eq3BinarySensorEntity(entry, entity_description)
|
||||
for entity_description in BINARY_SENSOR_ENTITY_DESCRIPTIONS
|
||||
)
|
||||
|
||||
|
||||
class Eq3BinarySensorEntity(Eq3Entity, BinarySensorEntity):
|
||||
"""Base class for eQ-3 binary sensor entities."""
|
||||
|
||||
entity_description: Eq3BinarySensorEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
entry: Eq3ConfigEntry,
|
||||
entity_description: Eq3BinarySensorEntityDescription,
|
||||
) -> None:
|
||||
"""Initialize the entity."""
|
||||
|
||||
super().__init__(entry, entity_description.key)
|
||||
self.entity_description = entity_description
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool:
|
||||
"""Return the state of the binary sensor."""
|
||||
|
||||
if TYPE_CHECKING:
|
||||
assert self._thermostat.status is not None
|
||||
|
||||
return self.entity_description.value_func(self._thermostat.status)
|
|
@ -3,6 +3,7 @@
|
|||
import logging
|
||||
from typing import Any
|
||||
|
||||
from eq3btsmart import Thermostat
|
||||
from eq3btsmart.const import EQ3BT_MAX_TEMP, EQ3BT_OFF_TEMP, Eq3Preset, OperationMode
|
||||
from eq3btsmart.exceptions import Eq3Exception
|
||||
|
||||
|
@ -14,35 +15,45 @@ from homeassistant.components.climate import (
|
|||
HVACAction,
|
||||
HVACMode,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import ATTR_TEMPERATURE, PRECISION_HALVES, UnitOfTemperature
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import ServiceValidationError
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers.device_registry import CONNECTION_BLUETOOTH
|
||||
from homeassistant.helpers.device_registry import CONNECTION_BLUETOOTH, DeviceInfo
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.util import slugify
|
||||
|
||||
from . import Eq3ConfigEntry
|
||||
from .const import (
|
||||
DEVICE_MODEL,
|
||||
DOMAIN,
|
||||
EQ_TO_HA_HVAC,
|
||||
HA_TO_EQ_HVAC,
|
||||
MANUFACTURER,
|
||||
SIGNAL_THERMOSTAT_CONNECTED,
|
||||
SIGNAL_THERMOSTAT_DISCONNECTED,
|
||||
CurrentTemperatureSelector,
|
||||
Preset,
|
||||
TargetTemperatureSelector,
|
||||
)
|
||||
from .entity import Eq3Entity
|
||||
from .models import Eq3Config, Eq3ConfigEntryData
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: Eq3ConfigEntry,
|
||||
config_entry: ConfigEntry,
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
) -> None:
|
||||
"""Handle config entry setup."""
|
||||
|
||||
eq3_config_entry: Eq3ConfigEntryData = hass.data[DOMAIN][config_entry.entry_id]
|
||||
|
||||
async_add_entities(
|
||||
[Eq3Climate(entry)],
|
||||
[Eq3Climate(eq3_config_entry.eq3_config, eq3_config_entry.thermostat)],
|
||||
)
|
||||
|
||||
|
||||
|
@ -69,6 +80,53 @@ class Eq3Climate(Eq3Entity, ClimateEntity):
    _attr_preset_mode: str | None = None
    _target_temperature: float | None = None

    def __init__(self, eq3_config: Eq3Config, thermostat: Thermostat) -> None:
        """Initialize the climate entity."""

        super().__init__(eq3_config, thermostat)
        self._attr_unique_id = dr.format_mac(eq3_config.mac_address)
        self._attr_device_info = DeviceInfo(
            name=slugify(self._eq3_config.mac_address),
            manufacturer=MANUFACTURER,
            model=DEVICE_MODEL,
            connections={(CONNECTION_BLUETOOTH, self._eq3_config.mac_address)},
        )

    async def async_added_to_hass(self) -> None:
        """Run when entity about to be added to hass."""

        self._thermostat.register_update_callback(self._async_on_updated)

        self.async_on_remove(
            async_dispatcher_connect(
                self.hass,
                f"{SIGNAL_THERMOSTAT_DISCONNECTED}_{self._eq3_config.mac_address}",
                self._async_on_disconnected,
            )
        )
        self.async_on_remove(
            async_dispatcher_connect(
                self.hass,
                f"{SIGNAL_THERMOSTAT_CONNECTED}_{self._eq3_config.mac_address}",
                self._async_on_connected,
            )
        )

    async def async_will_remove_from_hass(self) -> None:
        """Run when entity will be removed from hass."""

        self._thermostat.unregister_update_callback(self._async_on_updated)

    @callback
    def _async_on_disconnected(self) -> None:
        self._attr_available = False
        self.async_write_ha_state()

    @callback
    def _async_on_connected(self) -> None:
        self._attr_available = True
        self.async_write_ha_state()

    @callback
    def _async_on_updated(self) -> None:
        """Handle updated data from the thermostat."""

@ -79,15 +137,12 @@ class Eq3Climate(Eq3Entity, ClimateEntity):
        if self._thermostat.device_data is not None:
            self._async_on_device_updated()

        super()._async_on_updated()
        self.async_write_ha_state()

    @callback
    def _async_on_status_updated(self) -> None:
        """Handle updated status from the thermostat."""

        if self._thermostat.status is None:
            return

        self._target_temperature = self._thermostat.status.target_temperature.value
        self._attr_hvac_mode = EQ_TO_HA_HVAC[self._thermostat.status.operation_mode]
        self._attr_current_temperature = self._get_current_temperature()

@ -99,16 +154,13 @@ class Eq3Climate(Eq3Entity, ClimateEntity):
    def _async_on_device_updated(self) -> None:
        """Handle updated device data from the thermostat."""

        if self._thermostat.device_data is None:
            return

        device_registry = dr.async_get(self.hass)
        if device := device_registry.async_get_device(
            connections={(CONNECTION_BLUETOOTH, self._eq3_config.mac_address)},
        ):
            device_registry.async_update_device(
                device.id,
                sw_version=str(self._thermostat.device_data.firmware_version),
                sw_version=self._thermostat.device_data.firmware_version,
                serial_number=self._thermostat.device_data.device_serial.value,
            )

@ -213,7 +265,7 @@ class Eq3Climate(Eq3Entity, ClimateEntity):
        self.async_write_ha_state()

        try:
            await self._thermostat.async_set_temperature(temperature)
            await self._thermostat.async_set_temperature(self._target_temperature)
        except Eq3Exception:
            _LOGGER.error(
                "[%s] Failed setting temperature", self._eq3_config.mac_address
@ -18,20 +18,9 @@ DOMAIN = "eq3btsmart"
MANUFACTURER = "eQ-3 AG"
DEVICE_MODEL = "CC-RT-BLE-EQ"

ENTITY_KEY_DST = "dst"
ENTITY_KEY_BATTERY = "battery"
ENTITY_KEY_WINDOW = "window"
ENTITY_KEY_LOCK = "lock"
ENTITY_KEY_BOOST = "boost"
ENTITY_KEY_AWAY = "away"
ENTITY_KEY_COMFORT = "comfort"
ENTITY_KEY_ECO = "eco"
ENTITY_KEY_OFFSET = "offset"
ENTITY_KEY_WINDOW_OPEN_TEMPERATURE = "window_open_temperature"
ENTITY_KEY_WINDOW_OPEN_TIMEOUT = "window_open_timeout"

GET_DEVICE_TIMEOUT = 5  # seconds


EQ_TO_HA_HVAC: dict[OperationMode, HVACMode] = {
    OperationMode.OFF: HVACMode.OFF,
    OperationMode.ON: HVACMode.HEAT,

@ -82,5 +71,3 @@ DEFAULT_SCAN_INTERVAL = 10  # seconds

SIGNAL_THERMOSTAT_DISCONNECTED = f"{DOMAIN}.thermostat_disconnected"
SIGNAL_THERMOSTAT_CONNECTED = f"{DOMAIN}.thermostat_connected"

EQ3BT_STEP = 0.5
@ -1,22 +1,10 @@
"""Base class for all eQ-3 entities."""

from homeassistant.core import callback
from homeassistant.helpers.device_registry import (
    CONNECTION_BLUETOOTH,
    DeviceInfo,
    format_mac,
)
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity
from homeassistant.util import slugify
from eq3btsmart.thermostat import Thermostat

from . import Eq3ConfigEntry
from .const import (
    DEVICE_MODEL,
    MANUFACTURER,
    SIGNAL_THERMOSTAT_CONNECTED,
    SIGNAL_THERMOSTAT_DISCONNECTED,
)
from homeassistant.helpers.entity import Entity

from .models import Eq3Config


class Eq3Entity(Entity):

@ -24,70 +12,8 @@ class Eq3Entity(Entity):

    _attr_has_entity_name = True

    def __init__(
        self,
        entry: Eq3ConfigEntry,
        unique_id_key: str | None = None,
    ) -> None:
    def __init__(self, eq3_config: Eq3Config, thermostat: Thermostat) -> None:
        """Initialize the eq3 entity."""

        self._eq3_config = entry.runtime_data.eq3_config
        self._thermostat = entry.runtime_data.thermostat
        self._attr_device_info = DeviceInfo(
            name=slugify(self._eq3_config.mac_address),
            manufacturer=MANUFACTURER,
            model=DEVICE_MODEL,
            connections={(CONNECTION_BLUETOOTH, self._eq3_config.mac_address)},
        )
        suffix = f"_{unique_id_key}" if unique_id_key else ""
        self._attr_unique_id = f"{format_mac(self._eq3_config.mac_address)}{suffix}"

    async def async_added_to_hass(self) -> None:
        """Run when entity about to be added to hass."""

        self._thermostat.register_update_callback(self._async_on_updated)

        self.async_on_remove(
            async_dispatcher_connect(
                self.hass,
                f"{SIGNAL_THERMOSTAT_DISCONNECTED}_{self._eq3_config.mac_address}",
                self._async_on_disconnected,
            )
        )
        self.async_on_remove(
            async_dispatcher_connect(
                self.hass,
                f"{SIGNAL_THERMOSTAT_CONNECTED}_{self._eq3_config.mac_address}",
                self._async_on_connected,
            )
        )

    async def async_will_remove_from_hass(self) -> None:
        """Run when entity will be removed from hass."""

        self._thermostat.unregister_update_callback(self._async_on_updated)

    def _async_on_updated(self) -> None:
        """Handle updated data from the thermostat."""

        self.async_write_ha_state()

    @callback
    def _async_on_disconnected(self) -> None:
        """Handle disconnection from the thermostat."""

        self._attr_available = False
        self.async_write_ha_state()

    @callback
    def _async_on_connected(self) -> None:
        """Handle connection to the thermostat."""

        self._attr_available = True
        self.async_write_ha_state()

    @property
    def available(self) -> bool:
        """Whether the entity is available."""

        return self._thermostat.status is not None and self._attr_available
        self._eq3_config = eq3_config
        self._thermostat = thermostat
@ -1,49 +0,0 @@
{
  "entity": {
    "binary_sensor": {
      "dst": {
        "default": "mdi:sun-clock",
        "state": {
          "off": "mdi:sun-clock-outline"
        }
      }
    },
    "number": {
      "comfort": {
        "default": "mdi:sun-thermometer"
      },
      "eco": {
        "default": "mdi:snowflake-thermometer"
      },
      "offset": {
        "default": "mdi:thermometer-plus"
      },
      "window_open_temperature": {
        "default": "mdi:window-open-variant"
      },
      "window_open_timeout": {
        "default": "mdi:timer-refresh"
      }
    },
    "switch": {
      "away": {
        "default": "mdi:home-account",
        "state": {
          "on": "mdi:home-export"
        }
      },
      "lock": {
        "default": "mdi:lock",
        "state": {
          "off": "mdi:lock-off"
        }
      },
      "boost": {
        "default": "mdi:fire",
        "state": {
          "off": "mdi:fire-off"
        }
      }
    }
  }
}
@ -23,5 +23,5 @@
  "iot_class": "local_polling",
  "loggers": ["eq3btsmart"],
  "quality_scale": "silver",
  "requirements": ["eq3btsmart==1.4.1", "bleak-esphome==1.1.0"]
  "requirements": ["eq3btsmart==1.2.0", "bleak-esphome==1.1.0"]
}
@ -2,6 +2,7 @@

from dataclasses import dataclass

from eq3btsmart.const import DEFAULT_AWAY_HOURS, DEFAULT_AWAY_TEMP
from eq3btsmart.thermostat import Thermostat

from .const import (

@ -22,6 +23,8 @@ class Eq3Config:
    target_temp_selector: TargetTemperatureSelector = DEFAULT_TARGET_TEMP_SELECTOR
    external_temp_sensor: str = ""
    scan_interval: int = DEFAULT_SCAN_INTERVAL
    default_away_hours: float = DEFAULT_AWAY_HOURS
    default_away_temperature: float = DEFAULT_AWAY_TEMP


@dataclass(slots=True)
@ -1,158 +0,0 @@
"""Platform for eq3 number entities."""

from collections.abc import Awaitable, Callable
from dataclasses import dataclass
from typing import TYPE_CHECKING

from eq3btsmart import Thermostat
from eq3btsmart.const import (
    EQ3BT_MAX_OFFSET,
    EQ3BT_MAX_TEMP,
    EQ3BT_MIN_OFFSET,
    EQ3BT_MIN_TEMP,
)
from eq3btsmart.models import Presets

from homeassistant.components.number import (
    NumberDeviceClass,
    NumberEntity,
    NumberEntityDescription,
    NumberMode,
)
from homeassistant.const import EntityCategory, UnitOfTemperature, UnitOfTime
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from . import Eq3ConfigEntry
from .const import (
    ENTITY_KEY_COMFORT,
    ENTITY_KEY_ECO,
    ENTITY_KEY_OFFSET,
    ENTITY_KEY_WINDOW_OPEN_TEMPERATURE,
    ENTITY_KEY_WINDOW_OPEN_TIMEOUT,
    EQ3BT_STEP,
)
from .entity import Eq3Entity


@dataclass(frozen=True, kw_only=True)
class Eq3NumberEntityDescription(NumberEntityDescription):
    """Entity description for eq3 number entities."""

    value_func: Callable[[Presets], float]
    value_set_func: Callable[
        [Thermostat],
        Callable[[float], Awaitable[None]],
    ]
    mode: NumberMode = NumberMode.BOX
    entity_category: EntityCategory | None = EntityCategory.CONFIG


NUMBER_ENTITY_DESCRIPTIONS = [
    Eq3NumberEntityDescription(
        key=ENTITY_KEY_COMFORT,
        value_func=lambda presets: presets.comfort_temperature.value,
        value_set_func=lambda thermostat: thermostat.async_configure_comfort_temperature,
        translation_key=ENTITY_KEY_COMFORT,
        native_min_value=EQ3BT_MIN_TEMP,
        native_max_value=EQ3BT_MAX_TEMP,
        native_step=EQ3BT_STEP,
        native_unit_of_measurement=UnitOfTemperature.CELSIUS,
        device_class=NumberDeviceClass.TEMPERATURE,
    ),
    Eq3NumberEntityDescription(
        key=ENTITY_KEY_ECO,
        value_func=lambda presets: presets.eco_temperature.value,
        value_set_func=lambda thermostat: thermostat.async_configure_eco_temperature,
        translation_key=ENTITY_KEY_ECO,
        native_min_value=EQ3BT_MIN_TEMP,
        native_max_value=EQ3BT_MAX_TEMP,
        native_step=EQ3BT_STEP,
        native_unit_of_measurement=UnitOfTemperature.CELSIUS,
        device_class=NumberDeviceClass.TEMPERATURE,
    ),
    Eq3NumberEntityDescription(
        key=ENTITY_KEY_WINDOW_OPEN_TEMPERATURE,
        value_func=lambda presets: presets.window_open_temperature.value,
        value_set_func=lambda thermostat: thermostat.async_configure_window_open_temperature,
        translation_key=ENTITY_KEY_WINDOW_OPEN_TEMPERATURE,
        native_min_value=EQ3BT_MIN_TEMP,
        native_max_value=EQ3BT_MAX_TEMP,
        native_step=EQ3BT_STEP,
        native_unit_of_measurement=UnitOfTemperature.CELSIUS,
        device_class=NumberDeviceClass.TEMPERATURE,
    ),
    Eq3NumberEntityDescription(
        key=ENTITY_KEY_OFFSET,
        value_func=lambda presets: presets.offset_temperature.value,
        value_set_func=lambda thermostat: thermostat.async_configure_temperature_offset,
        translation_key=ENTITY_KEY_OFFSET,
        native_min_value=EQ3BT_MIN_OFFSET,
        native_max_value=EQ3BT_MAX_OFFSET,
        native_step=EQ3BT_STEP,
        native_unit_of_measurement=UnitOfTemperature.CELSIUS,
        device_class=NumberDeviceClass.TEMPERATURE,
    ),
    Eq3NumberEntityDescription(
        key=ENTITY_KEY_WINDOW_OPEN_TIMEOUT,
        value_set_func=lambda thermostat: thermostat.async_configure_window_open_duration,
        value_func=lambda presets: presets.window_open_time.value.total_seconds() / 60,
        translation_key=ENTITY_KEY_WINDOW_OPEN_TIMEOUT,
        native_min_value=0,
        native_max_value=60,
        native_step=5,
        native_unit_of_measurement=UnitOfTime.MINUTES,
    ),
]


async def async_setup_entry(
    hass: HomeAssistant,
    entry: Eq3ConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up the entry."""

    async_add_entities(
        Eq3NumberEntity(entry, entity_description)
        for entity_description in NUMBER_ENTITY_DESCRIPTIONS
    )


class Eq3NumberEntity(Eq3Entity, NumberEntity):
    """Base class for all eq3 number entities."""

    entity_description: Eq3NumberEntityDescription

    def __init__(
        self, entry: Eq3ConfigEntry, entity_description: Eq3NumberEntityDescription
    ) -> None:
        """Initialize the entity."""

        super().__init__(entry, entity_description.key)
        self.entity_description = entity_description

    @property
    def native_value(self) -> float:
        """Return the state of the entity."""

        if TYPE_CHECKING:
            assert self._thermostat.status is not None
            assert self._thermostat.status.presets is not None

        return self.entity_description.value_func(self._thermostat.status.presets)

    async def async_set_native_value(self, value: float) -> None:
        """Set the state of the entity."""

        await self.entity_description.value_set_func(self._thermostat)(value)

    @property
    def available(self) -> bool:
        """Return whether the entity is available."""

        return (
            self._thermostat.status is not None
            and self._thermostat.status.presets is not None
            and self._attr_available
        )
@ -18,40 +18,5 @@
    "error": {
      "invalid_mac_address": "Invalid MAC address"
    }
  },
  "entity": {
    "binary_sensor": {
      "dst": {
        "name": "Daylight saving time"
      }
    },
    "number": {
      "comfort": {
        "name": "Comfort temperature"
      },
      "eco": {
        "name": "Eco temperature"
      },
      "offset": {
        "name": "Offset temperature"
      },
      "window_open_temperature": {
        "name": "Window open temperature"
      },
      "window_open_timeout": {
        "name": "Window open timeout"
      }
    },
    "switch": {
      "lock": {
        "name": "Lock"
      },
      "boost": {
        "name": "Boost"
      },
      "away": {
        "name": "Away"
      }
    }
  }
}
@ -1,94 +0,0 @@
"""Platform for eq3 switch entities."""

from collections.abc import Awaitable, Callable
from dataclasses import dataclass
from typing import TYPE_CHECKING, Any

from eq3btsmart import Thermostat
from eq3btsmart.models import Status

from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from . import Eq3ConfigEntry
from .const import ENTITY_KEY_AWAY, ENTITY_KEY_BOOST, ENTITY_KEY_LOCK
from .entity import Eq3Entity


@dataclass(frozen=True, kw_only=True)
class Eq3SwitchEntityDescription(SwitchEntityDescription):
    """Entity description for eq3 switch entities."""

    toggle_func: Callable[[Thermostat], Callable[[bool], Awaitable[None]]]
    value_func: Callable[[Status], bool]


SWITCH_ENTITY_DESCRIPTIONS = [
    Eq3SwitchEntityDescription(
        key=ENTITY_KEY_LOCK,
        translation_key=ENTITY_KEY_LOCK,
        toggle_func=lambda thermostat: thermostat.async_set_locked,
        value_func=lambda status: status.is_locked,
    ),
    Eq3SwitchEntityDescription(
        key=ENTITY_KEY_BOOST,
        translation_key=ENTITY_KEY_BOOST,
        toggle_func=lambda thermostat: thermostat.async_set_boost,
        value_func=lambda status: status.is_boost,
    ),
    Eq3SwitchEntityDescription(
        key=ENTITY_KEY_AWAY,
        translation_key=ENTITY_KEY_AWAY,
        toggle_func=lambda thermostat: thermostat.async_set_away,
        value_func=lambda status: status.is_away,
    ),
]


async def async_setup_entry(
    hass: HomeAssistant,
    entry: Eq3ConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up the entry."""

    async_add_entities(
        Eq3SwitchEntity(entry, entity_description)
        for entity_description in SWITCH_ENTITY_DESCRIPTIONS
    )


class Eq3SwitchEntity(Eq3Entity, SwitchEntity):
    """Base class for eq3 switch entities."""

    entity_description: Eq3SwitchEntityDescription

    def __init__(
        self,
        entry: Eq3ConfigEntry,
        entity_description: Eq3SwitchEntityDescription,
    ) -> None:
        """Initialize the entity."""

        super().__init__(entry, entity_description.key)
        self.entity_description = entity_description

    async def async_turn_on(self, **kwargs: Any) -> None:
        """Turn on the switch."""

        await self.entity_description.toggle_func(self._thermostat)(True)

    async def async_turn_off(self, **kwargs: Any) -> None:
        """Turn off the switch."""

        await self.entity_description.toggle_func(self._thermostat)(False)

    @property
    def is_on(self) -> bool:
        """Return the state of the switch."""

        if TYPE_CHECKING:
            assert self._thermostat.status is not None

        return self.entity_description.value_func(self._thermostat.status)
@ -257,9 +257,6 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN):
        self, discovery_info: MqttServiceInfo
    ) -> ConfigFlowResult:
        """Handle MQTT discovery."""
        if not discovery_info.payload:
            return self.async_abort(reason="mqtt_missing_payload")

        device_info = json_loads_object(discovery_info.payload)
        if "mac" not in device_info:
            return self.async_abort(reason="mqtt_missing_mac")
@ -8,8 +8,7 @@
      "service_received": "Action received",
      "mqtt_missing_mac": "Missing MAC address in MQTT properties.",
      "mqtt_missing_api": "Missing API port in MQTT properties.",
      "mqtt_missing_ip": "Missing IP address in MQTT properties.",
      "mqtt_missing_payload": "Missing MQTT Payload."
      "mqtt_missing_ip": "Missing IP address in MQTT properties."
    },
    "error": {
      "resolve_error": "Can't resolve address of the ESP. If this error persists, please set a static IP address",
@ -73,9 +73,11 @@ class EzvizUpdateEntity(EzvizEntity, UpdateEntity):
        return self.data["version"]

    @property
    def in_progress(self) -> bool:
    def in_progress(self) -> bool | int | None:
        """Update installation progress."""
        return bool(self.data["upgrade_in_progress"])
        if self.data["upgrade_in_progress"]:
            return self.data["upgrade_percent"]
        return False

    @property
    def latest_version(self) -> str | None:

@ -91,13 +93,6 @@ class EzvizUpdateEntity(EzvizEntity, UpdateEntity):
            return self.data["latest_firmware_info"].get("desc")
        return None

    @property
    def update_percentage(self) -> int | None:
        """Update installation progress."""
        if self.data["upgrade_in_progress"]:
            return self.data["upgrade_percent"]
        return None

    async def async_install(
        self, version: str | None, backup: bool, **kwargs: Any
    ) -> None:
@ -4,5 +4,5 @@
  "codeowners": [],
  "documentation": "https://www.home-assistant.io/integrations/ffmpeg",
  "integration_type": "system",
  "requirements": ["ha-ffmpeg==3.2.2"]
  "requirements": ["ha-ffmpeg==3.2.1"]
}
@ -3,16 +3,88 @@
|
|||
from copy import deepcopy
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_FILE_PATH, CONF_NAME, CONF_PLATFORM, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.components.notify import migrate_notify_issue
|
||||
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
|
||||
from homeassistant.const import (
|
||||
CONF_FILE_PATH,
|
||||
CONF_NAME,
|
||||
CONF_PLATFORM,
|
||||
CONF_SCAN_INTERVAL,
|
||||
Platform,
|
||||
)
|
||||
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.helpers import (
|
||||
config_validation as cv,
|
||||
discovery,
|
||||
issue_registry as ir,
|
||||
)
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import DOMAIN
|
||||
from .notify import PLATFORM_SCHEMA as NOTIFY_PLATFORM_SCHEMA
|
||||
from .sensor import PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA
|
||||
|
||||
IMPORT_SCHEMA = {
|
||||
Platform.SENSOR: SENSOR_PLATFORM_SCHEMA,
|
||||
Platform.NOTIFY: NOTIFY_PLATFORM_SCHEMA,
|
||||
}
|
||||
|
||||
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
|
||||
PLATFORMS = [Platform.NOTIFY, Platform.SENSOR]
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the file integration."""
|
||||
|
||||
hass.data[DOMAIN] = config
|
||||
if hass.config_entries.async_entries(DOMAIN):
|
||||
# We skip import in case we already have config entries
|
||||
return True
|
||||
# The use of the legacy notify service was deprecated with HA Core 2024.6.0
|
||||
# and will be removed with HA Core 2024.12
|
||||
migrate_notify_issue(hass, DOMAIN, "File", "2024.12.0")
|
||||
# The YAML config was imported with HA Core 2024.6.0 and will be removed with
|
||||
# HA Core 2024.12
|
||||
ir.async_create_issue(
|
||||
hass,
|
||||
HOMEASSISTANT_DOMAIN,
|
||||
f"deprecated_yaml_{DOMAIN}",
|
||||
breaks_in_ha_version="2024.12.0",
|
||||
is_fixable=False,
|
||||
issue_domain=DOMAIN,
|
||||
learn_more_url="https://www.home-assistant.io/integrations/file/",
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
translation_key="deprecated_yaml",
|
||||
translation_placeholders={
|
||||
"domain": DOMAIN,
|
||||
"integration_title": "File",
|
||||
},
|
||||
)
|
||||
|
||||
# Import the YAML config into separate config entries
|
||||
platforms_config: dict[Platform, list[ConfigType]] = {
|
||||
domain: config[domain] for domain in PLATFORMS if domain in config
|
||||
}
|
||||
for domain, items in platforms_config.items():
|
||||
for item in items:
|
||||
if item[CONF_PLATFORM] == DOMAIN:
|
||||
file_config_item = IMPORT_SCHEMA[domain](item)
|
||||
file_config_item[CONF_PLATFORM] = domain
|
||||
if CONF_SCAN_INTERVAL in file_config_item:
|
||||
del file_config_item[CONF_SCAN_INTERVAL]
|
||||
hass.async_create_task(
|
||||
hass.config_entries.flow.async_init(
|
||||
DOMAIN,
|
||||
context={"source": SOURCE_IMPORT},
|
||||
data=file_config_item,
|
||||
)
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up a file component entry."""
|
||||
config = {**entry.data, **entry.options}
|
||||
|
@ -30,6 +102,20 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
|||
entry, [Platform(entry.data[CONF_PLATFORM])]
|
||||
)
|
||||
entry.async_on_unload(entry.add_update_listener(update_listener))
|
||||
if entry.data[CONF_PLATFORM] == Platform.NOTIFY and CONF_NAME in entry.data:
|
||||
# New notify entities are being setup through the config entry,
|
||||
# but during the deprecation period we want to keep the legacy notify platform,
|
||||
# so we forward the setup config through discovery.
|
||||
# Only the entities from yaml will still be available as legacy service.
|
||||
hass.async_create_task(
|
||||
discovery.async_load_platform(
|
||||
hass,
|
||||
Platform.NOTIFY,
|
||||
DOMAIN,
|
||||
config,
|
||||
hass.data[DOMAIN],
|
||||
)
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
|
|
|
@ -3,6 +3,7 @@
|
|||
from __future__ import annotations
|
||||
|
||||
from copy import deepcopy
|
||||
import os
|
||||
from typing import Any
|
||||
|
||||
import voluptuous as vol
|
||||
|
@ -15,6 +16,7 @@ from homeassistant.config_entries import (
|
|||
)
|
||||
from homeassistant.const import (
|
||||
CONF_FILE_PATH,
|
||||
CONF_FILENAME,
|
||||
CONF_NAME,
|
||||
CONF_PLATFORM,
|
||||
CONF_UNIT_OF_MEASUREMENT,
|
||||
|
@ -130,6 +132,27 @@ class FileConfigFlowHandler(ConfigFlow, domain=DOMAIN):
|
|||
"""Handle file sensor config flow."""
|
||||
return await self._async_handle_step(Platform.SENSOR.value, user_input)
|
||||
|
||||
async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult:
|
||||
"""Import `file`` config from configuration.yaml."""
|
||||
self._async_abort_entries_match(import_data)
|
||||
platform = import_data[CONF_PLATFORM]
|
||||
name: str = import_data.get(CONF_NAME, DEFAULT_NAME)
|
||||
file_name: str
|
||||
if platform == Platform.NOTIFY:
|
||||
file_name = import_data.pop(CONF_FILENAME)
|
||||
file_path: str = os.path.join(self.hass.config.config_dir, file_name)
|
||||
import_data[CONF_FILE_PATH] = file_path
|
||||
else:
|
||||
file_path = import_data[CONF_FILE_PATH]
|
||||
title = f"{name} [{file_path}]"
|
||||
data = deepcopy(import_data)
|
||||
options = {}
|
||||
for key, value in import_data.items():
|
||||
if key not in (CONF_FILE_PATH, CONF_PLATFORM, CONF_NAME):
|
||||
data.pop(key)
|
||||
options[key] = value
|
||||
return self.async_create_entry(title=title, data=data, options=options)
|
||||
|
||||
|
||||
class FileOptionsFlowHandler(OptionsFlow):
|
||||
"""Handle File options."""
|
||||
|
|
|
@ -2,23 +2,104 @@
|
|||
|
||||
from __future__ import annotations
|
||||
|
||||
from functools import partial
|
||||
import logging
|
||||
import os
|
||||
from typing import Any, TextIO
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.notify import (
|
||||
ATTR_TITLE,
|
||||
ATTR_TITLE_DEFAULT,
|
||||
PLATFORM_SCHEMA as NOTIFY_PLATFORM_SCHEMA,
|
||||
BaseNotificationService,
|
||||
NotifyEntity,
|
||||
NotifyEntityFeature,
|
||||
migrate_notify_issue,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_FILE_PATH, CONF_NAME
|
||||
from homeassistant.const import CONF_FILE_PATH, CONF_FILENAME, CONF_NAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ServiceValidationError
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
import homeassistant.util.dt as dt_util
|
||||
|
||||
from .const import CONF_TIMESTAMP, DEFAULT_NAME, DOMAIN, FILE_ICON
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
# The legacy platform schema uses a filename, after import
|
||||
# The full file path is stored in the config entry
|
||||
PLATFORM_SCHEMA = NOTIFY_PLATFORM_SCHEMA.extend(
|
||||
{
|
||||
vol.Required(CONF_FILENAME): cv.string,
|
||||
vol.Optional(CONF_TIMESTAMP, default=False): cv.boolean,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
async def async_get_service(
|
||||
hass: HomeAssistant,
|
||||
config: ConfigType,
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> FileNotificationService | None:
|
||||
"""Get the file notification service."""
|
||||
if discovery_info is None:
|
||||
# We only set up through discovery
|
||||
return None
|
||||
file_path: str = discovery_info[CONF_FILE_PATH]
|
||||
timestamp: bool = discovery_info[CONF_TIMESTAMP]
|
||||
|
||||
return FileNotificationService(file_path, timestamp)
|
||||
|
||||
|
||||
class FileNotificationService(BaseNotificationService):
|
||||
"""Implement the notification service for the File service."""
|
||||
|
||||
def __init__(self, file_path: str, add_timestamp: bool) -> None:
|
||||
"""Initialize the service."""
|
||||
self._file_path = file_path
|
||||
self.add_timestamp = add_timestamp
|
||||
|
||||
async def async_send_message(self, message: str = "", **kwargs: Any) -> None:
|
||||
"""Send a message to a file."""
|
||||
# The use of the legacy notify service was deprecated with HA Core 2024.6.0
|
||||
# and will be removed with HA Core 2024.12
|
||||
migrate_notify_issue(
|
||||
self.hass, DOMAIN, "File", "2024.12.0", service_name=self._service_name
|
||||
)
|
||||
await self.hass.async_add_executor_job(
|
||||
partial(self.send_message, message, **kwargs)
|
||||
)
|
||||
|
||||
def send_message(self, message: str = "", **kwargs: Any) -> None:
|
||||
"""Send a message to a file."""
|
||||
file: TextIO
|
||||
filepath = self._file_path
|
||||
try:
|
||||
with open(filepath, "a", encoding="utf8") as file:
|
||||
if os.stat(filepath).st_size == 0:
|
||||
title = (
|
||||
f"{kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT)} notifications (Log"
|
||||
f" started: {dt_util.utcnow().isoformat()})\n{'-' * 80}\n"
|
||||
)
|
||||
file.write(title)
|
||||
|
||||
if self.add_timestamp:
|
||||
text = f"{dt_util.utcnow().isoformat()} {message}\n"
|
||||
else:
|
||||
text = f"{message}\n"
|
||||
file.write(text)
|
||||
except OSError as exc:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="write_access_failed",
|
||||
translation_placeholders={"filename": filepath, "exc": f"{exc!r}"},
|
||||
) from exc
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
|
|
|
@ -6,8 +6,12 @@ import logging
|
|||
import os
|
||||
|
||||
from file_read_backwards import FileReadBackwards
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.sensor import SensorEntity
|
||||
from homeassistant.components.sensor import (
|
||||
PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA,
|
||||
SensorEntity,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import (
|
||||
CONF_FILE_PATH,
|
||||
|
@ -16,13 +20,38 @@ from homeassistant.const import (
|
|||
CONF_VALUE_TEMPLATE,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.template import Template
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from .const import DEFAULT_NAME, FILE_ICON
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend(
|
||||
{
|
||||
vol.Required(CONF_FILE_PATH): cv.isfile,
|
||||
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
|
||||
vol.Optional(CONF_VALUE_TEMPLATE): cv.string,
|
||||
vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_platform(
|
||||
hass: HomeAssistant,
|
||||
config: ConfigType,
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> None:
|
||||
"""Set up the file sensor from YAML.
|
||||
|
||||
The YAML platform config is automatically
|
||||
imported to a config entry, this method can be removed
|
||||
when YAML support is removed.
|
||||
"""
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
|
|
|
@ -18,7 +18,7 @@
        },
        "data_description": {
          "file_path": "The local file path to retrieve the sensor value from",
          "value_template": "A template to render the sensors value based on the file content",
          "value_template": "A template to render the the sensors value based on the file content",
          "unit_of_measurement": "Unit of measurement for the sensor"
        }
      },
@ -57,8 +57,6 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN):
|
|||
|
||||
VERSION = 1
|
||||
|
||||
_host: str
|
||||
|
||||
@staticmethod
|
||||
@callback
|
||||
def async_get_options_flow(
|
||||
|
@ -69,6 +67,7 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN):
|
|||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize FRITZ!Box Tools flow."""
|
||||
self._host: str | None = None
|
||||
self._name: str = ""
|
||||
self._password: str = ""
|
||||
self._use_tls: bool = False
|
||||
|
@ -113,6 +112,7 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN):
|
|||
|
||||
async def async_check_configured_entry(self) -> ConfigEntry | None:
|
||||
"""Check if entry is configured."""
|
||||
assert self._host
|
||||
current_host = await self.hass.async_add_executor_job(
|
||||
socket.gethostbyname, self._host
|
||||
)
|
||||
|
@ -154,17 +154,15 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN):
|
|||
) -> ConfigFlowResult:
|
||||
"""Handle a flow initialized by discovery."""
|
||||
ssdp_location: ParseResult = urlparse(discovery_info.ssdp_location or "")
|
||||
host = ssdp_location.hostname
|
||||
if not host or ipaddress.ip_address(host).is_link_local:
|
||||
return self.async_abort(reason="ignore_ip6_link_local")
|
||||
|
||||
self._host = host
|
||||
self._host = ssdp_location.hostname
|
||||
self._name = (
|
||||
discovery_info.upnp.get(ssdp.ATTR_UPNP_FRIENDLY_NAME)
|
||||
or discovery_info.upnp[ssdp.ATTR_UPNP_MODEL_NAME]
|
||||
)
|
||||
|
||||
uuid: str | None
|
||||
if not self._host or ipaddress.ip_address(self._host).is_link_local:
|
||||
return self.async_abort(reason="ignore_ip6_link_local")
|
||||
|
||||
if uuid := discovery_info.upnp.get(ssdp.ATTR_UPNP_UDN):
|
||||
if uuid.startswith("uuid:"):
|
||||
uuid = uuid[5:]
|
||||
|
|
|
@ -43,11 +43,10 @@ class FritzboxConfigFlow(ConfigFlow, domain=DOMAIN):
|
|||
|
||||
VERSION = 1
|
||||
|
||||
_name: str
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize flow."""
|
||||
self._host: str | None = None
|
||||
self._name: str | None = None
|
||||
self._password: str | None = None
|
||||
self._username: str | None = None
|
||||
|
||||
|
@ -159,6 +158,7 @@ class FritzboxConfigFlow(ConfigFlow, domain=DOMAIN):
|
|||
result = await self.async_try_connect()
|
||||
|
||||
if result == RESULT_SUCCESS:
|
||||
assert self._name is not None
|
||||
return self._get_entry(self._name)
|
||||
if result != RESULT_INVALID_AUTH:
|
||||
return self.async_abort(reason=result)
|
||||
|
|
|
@ -20,5 +20,5 @@
  "documentation": "https://www.home-assistant.io/integrations/frontend",
  "integration_type": "system",
  "quality_scale": "internal",
  "requirements": ["home-assistant-frontend==20241106.2"]
  "requirements": ["home-assistant-frontend==20241106.0"]
}
@ -7,5 +7,5 @@
  "documentation": "https://www.home-assistant.io/integrations/generic",
  "integration_type": "device",
  "iot_class": "local_push",
  "requirements": ["av==13.1.0", "Pillow==11.0.0"]
  "requirements": ["av==13.1.0", "Pillow==10.4.0"]
}
@ -4,7 +4,7 @@
    "step": {
      "user": {
        "title": "Add generic hygrostat",
        "description": "Create a humidifier entity that control the humidity via a switch and sensor.",
        "description": "Create a entity that control the humidity via a switch and sensor.",
        "data": {
          "device_class": "Device class",
          "dry_tolerance": "Dry tolerance",
@ -3,7 +3,7 @@
  "config": {
    "step": {
      "user": {
        "title": "Add generic thermostat",
        "title": "Add generic thermostat helper",
        "description": "Create a climate entity that controls the temperature via a switch and sensor.",
        "data": {
          "ac_mode": "Cooling mode",

@ -17,8 +17,8 @@
        "data_description": {
          "ac_mode": "Set the actuator specified to be treated as a cooling device instead of a heating device.",
          "heater": "Switch entity used to cool or heat depending on A/C mode.",
          "target_sensor": "Temperature sensor that reflects the current temperature.",
          "min_cycle_duration": "Set a minimum amount of time that the switch specified must be in its current state prior to being switched either off or on.",
          "target_sensor": "Temperature sensor that reflect the current temperature.",
          "min_cycle_duration": "Set a minimum amount of time that the switch specified must be in its current state prior to being switched either off or on. This option will be ignored if the keep alive option is set.",
          "cold_tolerance": "Minimum amount of difference between the temperature read by the temperature sensor the target temperature that must change prior to being switched on. For example, if the target temperature is 25 and the tolerance is 0.5 the heater will start when the sensor equals or goes below 24.5.",
          "hot_tolerance": "Minimum amount of difference between the temperature read by the temperature sensor the target temperature that must change prior to being switched off. For example, if the target temperature is 25 and the tolerance is 0.5 the heater will stop when the sensor equals or goes above 25.5."
        }
@ -9,6 +9,7 @@ import aiohttp
|
|||
from geniushubclient import GeniusHub
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant import config_entries
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import (
|
||||
ATTR_ENTITY_ID,
|
||||
|
@ -20,12 +21,20 @@ from homeassistant.const import (
|
|||
CONF_USERNAME,
|
||||
Platform,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.core import (
|
||||
DOMAIN as HOMEASSISTANT_DOMAIN,
|
||||
HomeAssistant,
|
||||
ServiceCall,
|
||||
callback,
|
||||
)
|
||||
from homeassistant.data_entry_flow import FlowResultType
|
||||
from homeassistant.helpers import config_validation as cv, entity_registry as er
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_send
|
||||
from homeassistant.helpers.event import async_track_time_interval
|
||||
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
|
||||
from homeassistant.helpers.service import verify_domain_control
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
|
@ -36,6 +45,27 @@ SCAN_INTERVAL = timedelta(seconds=60)
|
|||
|
||||
MAC_ADDRESS_REGEXP = r"^([0-9A-F]{2}:){5}([0-9A-F]{2})$"
|
||||
|
||||
CLOUD_API_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_TOKEN): cv.string,
|
||||
vol.Required(CONF_MAC): vol.Match(MAC_ADDRESS_REGEXP),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
LOCAL_API_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_HOST): cv.string,
|
||||
vol.Required(CONF_USERNAME): cv.string,
|
||||
vol.Required(CONF_PASSWORD): cv.string,
|
||||
vol.Optional(CONF_MAC): vol.Match(MAC_ADDRESS_REGEXP),
|
||||
}
|
||||
)
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema(
|
||||
{DOMAIN: vol.Any(LOCAL_API_SCHEMA, CLOUD_API_SCHEMA)}, extra=vol.ALLOW_EXTRA
|
||||
)
|
||||
|
||||
ATTR_ZONE_MODE = "mode"
|
||||
ATTR_DURATION = "duration"
|
||||
|
||||
|
@ -70,6 +100,56 @@ PLATFORMS = [
|
|||
]
|
||||
|
||||
|
||||
async def _async_import(hass: HomeAssistant, base_config: ConfigType) -> None:
|
||||
"""Import a config entry from configuration.yaml."""
|
||||
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN,
|
||||
context={"source": config_entries.SOURCE_IMPORT},
|
||||
data=base_config[DOMAIN],
|
||||
)
|
||||
if (
|
||||
result["type"] is FlowResultType.CREATE_ENTRY
|
||||
or result["reason"] == "already_configured"
|
||||
):
|
||||
async_create_issue(
|
||||
hass,
|
||||
HOMEASSISTANT_DOMAIN,
|
||||
f"deprecated_yaml_{DOMAIN}",
|
||||
breaks_in_ha_version="2024.12.0",
|
||||
is_fixable=False,
|
||||
issue_domain=DOMAIN,
|
||||
severity=IssueSeverity.WARNING,
|
||||
translation_key="deprecated_yaml",
|
||||
translation_placeholders={
|
||||
"domain": DOMAIN,
|
||||
"integration_title": "Genius Hub",
|
||||
},
|
||||
)
|
||||
return
|
||||
async_create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
f"deprecated_yaml_import_issue_{result['reason']}",
|
||||
breaks_in_ha_version="2024.12.0",
|
||||
is_fixable=False,
|
||||
issue_domain=DOMAIN,
|
||||
severity=IssueSeverity.WARNING,
|
||||
translation_key=f"deprecated_yaml_import_issue_{result['reason']}",
|
||||
translation_placeholders={
|
||||
"domain": DOMAIN,
|
||||
"integration_title": "Genius Hub",
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, base_config: ConfigType) -> bool:
|
||||
"""Set up a Genius Hub system."""
|
||||
if DOMAIN in base_config:
|
||||
hass.async_create_task(_async_import(hass, base_config))
|
||||
return True
|
||||
|
||||
|
||||
type GeniusHubConfigEntry = ConfigEntry[GeniusBroker]
|
||||
|
||||
|
||||
|
|
|
@ -13,6 +13,7 @@ import voluptuous as vol
|
|||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_TOKEN, CONF_USERNAME
|
||||
from homeassistant.data_entry_flow import FlowResultType
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .const import DOMAIN
|
||||
|
@ -122,3 +123,14 @@ class GeniusHubConfigFlow(ConfigFlow, domain=DOMAIN):
|
|||
return self.async_show_form(
|
||||
step_id="cloud_api", errors=errors, data_schema=CLOUD_API_SCHEMA
|
||||
)
|
||||
|
||||
async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult:
|
||||
"""Import the yaml config."""
|
||||
if CONF_HOST in import_data:
|
||||
result = await self.async_step_local_api(import_data)
|
||||
else:
|
||||
result = await self.async_step_cloud_api(import_data)
|
||||
if result["type"] is FlowResultType.FORM:
|
||||
assert result["errors"]
|
||||
return self.async_abort(reason=result["errors"]["base"])
|
||||
return result
|
||||
|
|
|
@ -4,7 +4,6 @@ import logging
|
|||
import shutil
|
||||
|
||||
from aiohttp.client_exceptions import ClientConnectionError, ServerConnectionError
|
||||
from awesomeversion import AwesomeVersion
|
||||
from go2rtc_client import Go2RtcRestClient
|
||||
from go2rtc_client.exceptions import Go2RtcClientError, Go2RtcVersionError
|
||||
from go2rtc_client.ws import (
|
||||
|
@ -33,23 +32,13 @@ from homeassistant.config_entries import SOURCE_SYSTEM, ConfigEntry
|
|||
from homeassistant.const import CONF_URL, EVENT_HOMEASSISTANT_STOP
|
||||
from homeassistant.core import Event, HomeAssistant, callback
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.helpers import (
|
||||
config_validation as cv,
|
||||
discovery_flow,
|
||||
issue_registry as ir,
|
||||
)
|
||||
from homeassistant.helpers import config_validation as cv, discovery_flow
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
from homeassistant.util.package import is_docker_env
|
||||
|
||||
from .const import (
|
||||
CONF_DEBUG_UI,
|
||||
DEBUG_UI_URL_MESSAGE,
|
||||
DOMAIN,
|
||||
HA_MANAGED_URL,
|
||||
RECOMMENDED_VERSION,
|
||||
)
|
||||
from .const import CONF_DEBUG_UI, DEBUG_UI_URL_MESSAGE, DOMAIN, HA_MANAGED_URL
|
||||
from .server import Server
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
@ -158,21 +147,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
|||
# Validate the server URL
|
||||
try:
|
||||
client = Go2RtcRestClient(async_get_clientsession(hass), url)
|
||||
version = await client.validate_server_version()
|
||||
if version < AwesomeVersion(RECOMMENDED_VERSION):
|
||||
ir.async_create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
"recommended_version",
|
||||
is_fixable=False,
|
||||
is_persistent=False,
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
translation_key="recommended_version",
|
||||
translation_placeholders={
|
||||
"recommended_version": RECOMMENDED_VERSION,
|
||||
"current_version": str(version),
|
||||
},
|
||||
)
|
||||
await client.validate_server_version()
|
||||
except Go2RtcClientError as err:
|
||||
if isinstance(err.__cause__, _RETRYABLE_ERRORS):
|
||||
raise ConfigEntryNotReady(
|
||||
|
@ -249,13 +224,7 @@ class WebRTCProvider(CameraWebRTCProvider):
|
|||
):
|
||||
await self._rest_client.streams.add(
|
||||
camera.entity_id,
|
||||
[
|
||||
stream_source,
|
||||
# We are setting any ffmpeg rtsp related logs to debug
|
||||
# Connection problems to the camera will be logged by the first stream
|
||||
# Therefore setting it to debug will not hide any important logs
|
||||
f"ffmpeg:{camera.entity_id}#audio=opus#query=log_level=debug",
|
||||
],
|
||||
[stream_source, f"ffmpeg:{camera.entity_id}#audio=opus"],
|
||||
)
|
||||
|
||||
@callback
|
||||
|
|
|
@ -6,4 +6,3 @@ CONF_DEBUG_UI = "debug_ui"
DEBUG_UI_URL_MESSAGE = "Url and debug_ui cannot be set at the same time."
HA_MANAGED_API_PORT = 11984
HA_MANAGED_URL = f"http://localhost:{HA_MANAGED_API_PORT}/"
RECOMMENDED_VERSION = "1.9.7"
@ -7,6 +7,6 @@
  "documentation": "https://www.home-assistant.io/integrations/go2rtc",
  "integration_type": "system",
  "iot_class": "local_polling",
  "requirements": ["go2rtc-client==0.1.1"],
  "requirements": ["go2rtc-client==0.1.0"],
  "single_config_entry": true
}
@ -24,15 +24,14 @@ _RESPAWN_COOLDOWN = 1
|
|||
|
||||
# Default configuration for HA
|
||||
# - Api is listening only on localhost
|
||||
# - Enable rtsp for localhost only as ffmpeg needs it
|
||||
# - Disable rtsp listener
|
||||
# - Clear default ice servers
|
||||
_GO2RTC_CONFIG_FORMAT = r"""# This file is managed by Home Assistant
|
||||
# Do not edit it manually
|
||||
|
||||
_GO2RTC_CONFIG_FORMAT = r"""
|
||||
api:
|
||||
listen: "{api_ip}:{api_port}"
|
||||
|
||||
rtsp:
|
||||
# ffmpeg needs rtsp for opus audio transcoding
|
||||
listen: "127.0.0.1:18554"
|
||||
|
||||
webrtc:
|
||||
|
|
|
@ -1,8 +0,0 @@
{
  "issues": {
    "recommended_version": {
      "title": "Outdated go2rtc server detected",
      "description": "We detected that you are using an outdated go2rtc server version. For the best experience, we recommend updating the go2rtc server to version `{recommended_version}`.\nCurrently you are using version `{current_version}`."
    }
  }
}
@ -87,8 +87,8 @@
      }
    },
    "create_event": {
      "name": "Create event",
      "description": "Adds a new calendar event.",
      "name": "Creates event",
      "description": "Add a new calendar event.",
      "fields": {
        "summary": {
          "name": "Summary",
@ -78,7 +78,6 @@ TYPE_AWNING = f"{PREFIX_TYPES}AWNING"
|
|||
TYPE_BLINDS = f"{PREFIX_TYPES}BLINDS"
|
||||
TYPE_CAMERA = f"{PREFIX_TYPES}CAMERA"
|
||||
TYPE_CURTAIN = f"{PREFIX_TYPES}CURTAIN"
|
||||
TYPE_CARBON_MONOXIDE_DETECTOR = f"{PREFIX_TYPES}CARBON_MONOXIDE_DETECTOR"
|
||||
TYPE_DEHUMIDIFIER = f"{PREFIX_TYPES}DEHUMIDIFIER"
|
||||
TYPE_DOOR = f"{PREFIX_TYPES}DOOR"
|
||||
TYPE_DOORBELL = f"{PREFIX_TYPES}DOORBELL"
|
||||
|
@ -94,7 +93,6 @@ TYPE_SCENE = f"{PREFIX_TYPES}SCENE"
|
|||
TYPE_SENSOR = f"{PREFIX_TYPES}SENSOR"
|
||||
TYPE_SETTOP = f"{PREFIX_TYPES}SETTOP"
|
||||
TYPE_SHUTTER = f"{PREFIX_TYPES}SHUTTER"
|
||||
TYPE_SMOKE_DETECTOR = f"{PREFIX_TYPES}SMOKE_DETECTOR"
|
||||
TYPE_SPEAKER = f"{PREFIX_TYPES}SPEAKER"
|
||||
TYPE_SWITCH = f"{PREFIX_TYPES}SWITCH"
|
||||
TYPE_THERMOSTAT = f"{PREFIX_TYPES}THERMOSTAT"
|
||||
|
@ -138,7 +136,6 @@ EVENT_SYNC_RECEIVED = "google_assistant_sync"
|
|||
|
||||
DOMAIN_TO_GOOGLE_TYPES = {
|
||||
alarm_control_panel.DOMAIN: TYPE_ALARM,
|
||||
binary_sensor.DOMAIN: TYPE_SENSOR,
|
||||
button.DOMAIN: TYPE_SCENE,
|
||||
camera.DOMAIN: TYPE_CAMERA,
|
||||
climate.DOMAIN: TYPE_THERMOSTAT,
|
||||
|
@ -171,14 +168,6 @@ DEVICE_CLASS_TO_GOOGLE_TYPES = {
|
|||
binary_sensor.DOMAIN,
|
||||
binary_sensor.BinarySensorDeviceClass.GARAGE_DOOR,
|
||||
): TYPE_GARAGE,
|
||||
(
|
||||
binary_sensor.DOMAIN,
|
||||
binary_sensor.BinarySensorDeviceClass.SMOKE,
|
||||
): TYPE_SMOKE_DETECTOR,
|
||||
(
|
||||
binary_sensor.DOMAIN,
|
||||
binary_sensor.BinarySensorDeviceClass.CO,
|
||||
): TYPE_CARBON_MONOXIDE_DETECTOR,
|
||||
(cover.DOMAIN, cover.CoverDeviceClass.AWNING): TYPE_AWNING,
|
||||
(cover.DOMAIN, cover.CoverDeviceClass.CURTAIN): TYPE_CURTAIN,
|
||||
(cover.DOMAIN, cover.CoverDeviceClass.DOOR): TYPE_DOOR,
|
||||
|
|
|
@ -2706,21 +2706,6 @@ class SensorStateTrait(_Trait):
|
|||
),
|
||||
}
|
||||
|
||||
binary_sensor_types = {
|
||||
binary_sensor.BinarySensorDeviceClass.CO: (
|
||||
"CarbonMonoxideLevel",
|
||||
["carbon monoxide detected", "no carbon monoxide detected", "unknown"],
|
||||
),
|
||||
binary_sensor.BinarySensorDeviceClass.SMOKE: (
|
||||
"SmokeLevel",
|
||||
["smoke detected", "no smoke detected", "unknown"],
|
||||
),
|
||||
binary_sensor.BinarySensorDeviceClass.MOISTURE: (
|
||||
"WaterLeak",
|
||||
["leak", "no leak", "unknown"],
|
||||
),
|
||||
}
|
||||
|
||||
name = TRAIT_SENSOR_STATE
|
||||
commands: list[str] = []
|
||||
|
||||
|
@ -2743,37 +2728,24 @@ class SensorStateTrait(_Trait):
|
|||
@classmethod
|
||||
def supported(cls, domain, features, device_class, _):
|
||||
"""Test if state is supported."""
|
||||
return (domain == sensor.DOMAIN and device_class in cls.sensor_types) or (
|
||||
domain == binary_sensor.DOMAIN and device_class in cls.binary_sensor_types
|
||||
)
|
||||
return domain == sensor.DOMAIN and device_class in cls.sensor_types
|
||||
|
||||
def sync_attributes(self) -> dict[str, Any]:
|
||||
"""Return attributes for a sync request."""
|
||||
device_class = self.state.attributes.get(ATTR_DEVICE_CLASS)
|
||||
data = self.sensor_types.get(device_class)
|
||||
|
||||
def create_sensor_state(
|
||||
name: str,
|
||||
raw_value_unit: str | None = None,
|
||||
available_states: list[str] | None = None,
|
||||
) -> dict[str, Any]:
|
||||
sensor_state: dict[str, Any] = {
|
||||
"name": name,
|
||||
}
|
||||
if raw_value_unit:
|
||||
sensor_state["numericCapabilities"] = {"rawValueUnit": raw_value_unit}
|
||||
if available_states:
|
||||
sensor_state["descriptiveCapabilities"] = {
|
||||
"availableStates": available_states
|
||||
}
|
||||
return {"sensorStatesSupported": [sensor_state]}
|
||||
if device_class is None or data is None:
|
||||
return {}
|
||||
|
||||
if self.state.domain == sensor.DOMAIN:
|
||||
sensor_data = self.sensor_types.get(device_class)
|
||||
if device_class is None or sensor_data is None:
|
||||
return {}
|
||||
available_states: list[str] | None = None
|
||||
if device_class == sensor.SensorDeviceClass.AQI:
|
||||
available_states = [
|
||||
sensor_state = {
|
||||
"name": data[0],
|
||||
"numericCapabilities": {"rawValueUnit": data[1]},
|
||||
}
|
||||
|
||||
if device_class == sensor.SensorDeviceClass.AQI:
|
||||
sensor_state["descriptiveCapabilities"] = {
|
||||
"availableStates": [
|
||||
"healthy",
|
||||
"moderate",
|
||||
"unhealthy for sensitive groups",
|
||||
|
@ -2781,53 +2753,30 @@ class SensorStateTrait(_Trait):
|
|||
"very unhealthy",
|
||||
"hazardous",
|
||||
"unknown",
|
||||
]
|
||||
return create_sensor_state(sensor_data[0], sensor_data[1], available_states)
|
||||
binary_sensor_data = self.binary_sensor_types.get(device_class)
|
||||
if device_class is None or binary_sensor_data is None:
|
||||
return {}
|
||||
return create_sensor_state(
|
||||
binary_sensor_data[0], available_states=binary_sensor_data[1]
|
||||
)
|
||||
],
|
||||
}
|
||||
|
||||
return {"sensorStatesSupported": [sensor_state]}
|
||||
|
||||
def query_attributes(self) -> dict[str, Any]:
|
||||
"""Return the attributes of this trait for this entity."""
|
||||
device_class = self.state.attributes.get(ATTR_DEVICE_CLASS)
|
||||
data = self.sensor_types.get(device_class)
|
||||
|
||||
def create_sensor_state(
|
||||
name: str, raw_value: float | None = None, current_state: str | None = None
|
||||
) -> dict[str, Any]:
|
||||
sensor_state: dict[str, Any] = {
|
||||
"name": name,
|
||||
"rawValue": raw_value,
|
||||
}
|
||||
if current_state:
|
||||
sensor_state["currentSensorState"] = current_state
|
||||
return {"currentSensorStateData": [sensor_state]}
|
||||
|
||||
if self.state.domain == sensor.DOMAIN:
|
||||
sensor_data = self.sensor_types.get(device_class)
|
||||
if device_class is None or sensor_data is None:
|
||||
return {}
|
||||
try:
|
||||
value = float(self.state.state)
|
||||
except ValueError:
|
||||
value = None
|
||||
if self.state.state == STATE_UNKNOWN:
|
||||
value = None
|
||||
current_state: str | None = None
|
||||
if device_class == sensor.SensorDeviceClass.AQI:
|
||||
current_state = self._air_quality_description_for_aqi(value)
|
||||
return create_sensor_state(sensor_data[0], value, current_state)
|
||||
|
||||
binary_sensor_data = self.binary_sensor_types.get(device_class)
|
||||
if device_class is None or binary_sensor_data is None:
|
||||
if device_class is None or data is None:
|
||||
return {}
|
||||
value = {
|
||||
STATE_ON: 0,
|
||||
STATE_OFF: 1,
|
||||
STATE_UNKNOWN: 2,
|
||||
}[self.state.state]
|
||||
return create_sensor_state(
|
||||
binary_sensor_data[0], current_state=binary_sensor_data[1][value]
|
||||
)
|
||||
|
||||
try:
|
||||
value = float(self.state.state)
|
||||
except ValueError:
|
||||
value = None
|
||||
if self.state.state == STATE_UNKNOWN:
|
||||
value = None
|
||||
sensor_data = {"name": data[0], "rawValue": value}
|
||||
|
||||
if device_class == sensor.SensorDeviceClass.AQI:
|
||||
sensor_data["currentSensorState"] = self._air_quality_description_for_aqi(
|
||||
value
|
||||
)
|
||||
|
||||
return {"currentSensorStateData": [sensor_data]}
|
||||
|
|
|
@ -25,16 +25,7 @@ UNIT_TASKS = "tasks"
|
|||
ATTR_CONFIG_ENTRY = "config_entry"
|
||||
ATTR_SKILL = "skill"
|
||||
ATTR_TASK = "task"
|
||||
ATTR_DIRECTION = "direction"
|
||||
SERVICE_CAST_SKILL = "cast_skill"
|
||||
SERVICE_START_QUEST = "start_quest"
|
||||
SERVICE_ACCEPT_QUEST = "accept_quest"
|
||||
SERVICE_CANCEL_QUEST = "cancel_quest"
|
||||
SERVICE_ABORT_QUEST = "abort_quest"
|
||||
SERVICE_REJECT_QUEST = "reject_quest"
|
||||
SERVICE_LEAVE_QUEST = "leave_quest"
|
||||
SERVICE_SCORE_HABIT = "score_habit"
|
||||
SERVICE_SCORE_REWARD = "score_reward"
|
||||
|
||||
WARRIOR = "warrior"
|
||||
ROGUE = "rogue"
|
||||
|
|
|
@@ -51,17 +51,12 @@ class HabiticaDataUpdateCoordinator(DataUpdateCoordinator[HabiticaData]):
),
)
self.api = habitipy
self.content: dict[str, Any] = {}

async def _async_update_data(self) -> HabiticaData:
try:
user_response = await self.api.user.get()
tasks_response = await self.api.tasks.user.get()
tasks_response.extend(await self.api.tasks.user.get(type="completedTodos"))
if not self.content:
self.content = await self.api.content.get(
language=user_response["preferences"]["language"]
)
except ClientResponseError as error:
if error.status == HTTPStatus.TOO_MANY_REQUESTS:
_LOGGER.debug("Rate limit exceeded, will try again later")
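The coordinator hunk above fetches Habitica's game content once and caches it for later refreshes. A minimal sketch of that fetch-once pattern, with a hypothetical async client standing in for the habitipy API:

# Sketch only: `api.get_content` is a hypothetical stand-in, not habitipy's API.
from typing import Any

class ContentCache:
    def __init__(self, api) -> None:
        self.api = api
        self.content: dict[str, Any] = {}

    async def async_refresh(self, language: str) -> None:
        # Game content is large and rarely changes, so fetch it only once.
        if not self.content:
            self.content = await self.api.get_content(language=language)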
@@ -126,18 +126,6 @@
},
"rewards": {
"default": "mdi:treasure-chest"
},
"strength": {
"default": "mdi:arm-flex-outline"
},
"intelligence": {
"default": "mdi:head-snowflake-outline"
},
"perception": {
"default": "mdi:eye-outline"
},
"constitution": {
"default": "mdi:run-fast"
}
},
"switch": {
@@ -163,30 +151,6 @@
},
"cast_skill": {
"service": "mdi:creation-outline"
},
"accept_quest": {
"service": "mdi:script-text"
},
"reject_quest": {
"service": "mdi:script-text"
},
"leave_quest": {
"service": "mdi:script-text"
},
"abort_quest": {
"service": "mdi:script-text-key"
},
"cancel_quest": {
"service": "mdi:script-text-key"
},
"start_quest": {
"service": "mdi:script-text-key"
},
"score_habit": {
"service": "mdi:counter"
},
"score_reward": {
"service": "mdi:sack"
}
}
}
@@ -27,7 +27,7 @@ from homeassistant.helpers.typing import StateType
from .const import DOMAIN, UNIT_TASKS
from .entity import HabiticaBase
from .types import HabiticaConfigEntry
from .util import entity_used_in, get_attribute_points, get_attributes_total
from .util import entity_used_in

_LOGGER = logging.getLogger(__name__)
@@ -36,10 +36,7 @@ _LOGGER = logging.getLogger(__name__)
class HabitipySensorEntityDescription(SensorEntityDescription):
"""Habitipy Sensor Description."""

value_fn: Callable[[dict[str, Any], dict[str, Any]], StateType]
attributes_fn: (
Callable[[dict[str, Any], dict[str, Any]], dict[str, Any] | None] | None
) = None
value_fn: Callable[[dict[str, Any]], StateType]


@dataclass(kw_only=True, frozen=True)
@@ -68,80 +65,76 @@ class HabitipySensorEntity(StrEnum):
REWARDS = "rewards"
GEMS = "gems"
TRINKETS = "trinkets"
STRENGTH = "strength"
INTELLIGENCE = "intelligence"
CONSTITUTION = "constitution"
PERCEPTION = "perception"


SENSOR_DESCRIPTIONS: tuple[HabitipySensorEntityDescription, ...] = (
HabitipySensorEntityDescription(
key=HabitipySensorEntity.DISPLAY_NAME,
translation_key=HabitipySensorEntity.DISPLAY_NAME,
value_fn=lambda user, _: user.get("profile", {}).get("name"),
value_fn=lambda user: user.get("profile", {}).get("name"),
),
HabitipySensorEntityDescription(
key=HabitipySensorEntity.HEALTH,
translation_key=HabitipySensorEntity.HEALTH,
native_unit_of_measurement="HP",
suggested_display_precision=0,
value_fn=lambda user, _: user.get("stats", {}).get("hp"),
value_fn=lambda user: user.get("stats", {}).get("hp"),
),
HabitipySensorEntityDescription(
key=HabitipySensorEntity.HEALTH_MAX,
translation_key=HabitipySensorEntity.HEALTH_MAX,
native_unit_of_measurement="HP",
entity_registry_enabled_default=False,
value_fn=lambda user, _: user.get("stats", {}).get("maxHealth"),
value_fn=lambda user: user.get("stats", {}).get("maxHealth"),
),
HabitipySensorEntityDescription(
key=HabitipySensorEntity.MANA,
translation_key=HabitipySensorEntity.MANA,
native_unit_of_measurement="MP",
suggested_display_precision=0,
value_fn=lambda user, _: user.get("stats", {}).get("mp"),
value_fn=lambda user: user.get("stats", {}).get("mp"),
),
HabitipySensorEntityDescription(
key=HabitipySensorEntity.MANA_MAX,
translation_key=HabitipySensorEntity.MANA_MAX,
native_unit_of_measurement="MP",
value_fn=lambda user, _: user.get("stats", {}).get("maxMP"),
value_fn=lambda user: user.get("stats", {}).get("maxMP"),
),
HabitipySensorEntityDescription(
key=HabitipySensorEntity.EXPERIENCE,
translation_key=HabitipySensorEntity.EXPERIENCE,
native_unit_of_measurement="XP",
value_fn=lambda user, _: user.get("stats", {}).get("exp"),
value_fn=lambda user: user.get("stats", {}).get("exp"),
),
HabitipySensorEntityDescription(
key=HabitipySensorEntity.EXPERIENCE_MAX,
translation_key=HabitipySensorEntity.EXPERIENCE_MAX,
native_unit_of_measurement="XP",
value_fn=lambda user, _: user.get("stats", {}).get("toNextLevel"),
value_fn=lambda user: user.get("stats", {}).get("toNextLevel"),
),
HabitipySensorEntityDescription(
key=HabitipySensorEntity.LEVEL,
translation_key=HabitipySensorEntity.LEVEL,
value_fn=lambda user, _: user.get("stats", {}).get("lvl"),
value_fn=lambda user: user.get("stats", {}).get("lvl"),
),
HabitipySensorEntityDescription(
key=HabitipySensorEntity.GOLD,
translation_key=HabitipySensorEntity.GOLD,
native_unit_of_measurement="GP",
suggested_display_precision=2,
value_fn=lambda user, _: user.get("stats", {}).get("gp"),
value_fn=lambda user: user.get("stats", {}).get("gp"),
),
HabitipySensorEntityDescription(
key=HabitipySensorEntity.CLASS,
translation_key=HabitipySensorEntity.CLASS,
value_fn=lambda user, _: user.get("stats", {}).get("class"),
value_fn=lambda user: user.get("stats", {}).get("class"),
device_class=SensorDeviceClass.ENUM,
options=["warrior", "healer", "wizard", "rogue"],
),
HabitipySensorEntityDescription(
key=HabitipySensorEntity.GEMS,
translation_key=HabitipySensorEntity.GEMS,
value_fn=lambda user, _: user.get("balance", 0) * 4,
value_fn=lambda user: user.get("balance", 0) * 4,
suggested_display_precision=0,
native_unit_of_measurement="gems",
),
@@ -149,7 +142,7 @@ SENSOR_DESCRIPTIONS: tuple[HabitipySensorEntityDescription, ...] = (
key=HabitipySensorEntity.TRINKETS,
translation_key=HabitipySensorEntity.TRINKETS,
value_fn=(
lambda user, _: user.get("purchased", {})
lambda user: user.get("purchased", {})
.get("plan", {})
.get("consecutive", {})
.get("trinkets", 0)
@@ -157,38 +150,6 @@ SENSOR_DESCRIPTIONS: tuple[HabitipySensorEntityDescription, ...] = (
suggested_display_precision=0,
native_unit_of_measurement="⧖",
),
HabitipySensorEntityDescription(
key=HabitipySensorEntity.STRENGTH,
translation_key=HabitipySensorEntity.STRENGTH,
value_fn=lambda user, content: get_attributes_total(user, content, "str"),
attributes_fn=lambda user, content: get_attribute_points(user, content, "str"),
suggested_display_precision=0,
native_unit_of_measurement="STR",
),
HabitipySensorEntityDescription(
key=HabitipySensorEntity.INTELLIGENCE,
translation_key=HabitipySensorEntity.INTELLIGENCE,
value_fn=lambda user, content: get_attributes_total(user, content, "int"),
attributes_fn=lambda user, content: get_attribute_points(user, content, "int"),
suggested_display_precision=0,
native_unit_of_measurement="INT",
),
HabitipySensorEntityDescription(
key=HabitipySensorEntity.PERCEPTION,
translation_key=HabitipySensorEntity.PERCEPTION,
value_fn=lambda user, content: get_attributes_total(user, content, "per"),
attributes_fn=lambda user, content: get_attribute_points(user, content, "per"),
suggested_display_precision=0,
native_unit_of_measurement="PER",
),
HabitipySensorEntityDescription(
key=HabitipySensorEntity.CONSTITUTION,
translation_key=HabitipySensorEntity.CONSTITUTION,
value_fn=lambda user, content: get_attributes_total(user, content, "con"),
attributes_fn=lambda user, content: get_attribute_points(user, content, "con"),
suggested_display_precision=0,
native_unit_of_measurement="CON",
),
)
@@ -282,16 +243,7 @@ class HabitipySensor(HabiticaBase, SensorEntity):
def native_value(self) -> StateType:
"""Return the state of the device."""

return self.entity_description.value_fn(
self.coordinator.data.user, self.coordinator.content
)

@property
def extra_state_attributes(self) -> dict[str, float | None] | None:
"""Return entity specific state attributes."""
if func := self.entity_description.attributes_fn:
return func(self.coordinator.data.user, self.coordinator.content)
return None
return self.entity_description.value_fn(self.coordinator.data.user)


class HabitipyTaskSensor(HabiticaBase, SensorEntity):
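The two sides of this hunk use different callback signatures: one passes both the user data and the cached game content to value_fn (with an optional attributes_fn), the other passes only the user data. A small illustrative sketch of the two shapes; the dataclasses and sample data here are assumptions, not the integration's exact code:

from collections.abc import Callable
from dataclasses import dataclass
from typing import Any

@dataclass(frozen=True, kw_only=True)
class OneArgDescription:
    key: str
    value_fn: Callable[[dict[str, Any]], Any]

@dataclass(frozen=True, kw_only=True)
class TwoArgDescription:
    key: str
    value_fn: Callable[[dict[str, Any], dict[str, Any]], Any]

user = {"balance": 1.25}  # sample user data (assumed)
content = {}              # cached game content, unused in this example

gems_one = OneArgDescription(key="gems", value_fn=lambda user: user.get("balance", 0) * 4)
gems_two = TwoArgDescription(key="gems", value_fn=lambda user, _: user.get("balance", 0) * 4)

# Both variants yield the same native value for the gems sensor.
assert gems_one.value_fn(user) == gems_two.value_fn(user, content) == 5.0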
@@ -19,29 +19,19 @@ from homeassistant.core import (
)
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
from homeassistant.helpers.selector import ConfigEntrySelector

from .const import (
ATTR_ARGS,
ATTR_CONFIG_ENTRY,
ATTR_DATA,
ATTR_DIRECTION,
ATTR_PATH,
ATTR_SKILL,
ATTR_TASK,
DOMAIN,
EVENT_API_CALL_SUCCESS,
SERVICE_ABORT_QUEST,
SERVICE_ACCEPT_QUEST,
SERVICE_API_CALL,
SERVICE_CANCEL_QUEST,
SERVICE_CAST_SKILL,
SERVICE_LEAVE_QUEST,
SERVICE_REJECT_QUEST,
SERVICE_SCORE_HABIT,
SERVICE_SCORE_REWARD,
SERVICE_START_QUEST,
)
from .types import HabiticaConfigEntry
@@ -64,19 +54,6 @@ SERVICE_CAST_SKILL_SCHEMA = vol.Schema(
}
)

SERVICE_MANAGE_QUEST_SCHEMA = vol.Schema(
{
vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector(),
}
)
SERVICE_SCORE_TASK_SCHEMA = vol.Schema(
{
vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector(),
vol.Required(ATTR_TASK): cv.string,
vol.Optional(ATTR_DIRECTION): cv.string,
}
)


def get_config_entry(hass: HomeAssistant, entry_id: str) -> HabiticaConfigEntry:
"""Return config entry or raise if not found or not loaded."""
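For illustration, a short sketch of how a voluptuous schema like SERVICE_SCORE_TASK_SCHEMA validates service call data; the ConfigEntrySelector is replaced here by a plain string check, so this is a simplification rather than the integration's exact schema:

import voluptuous as vol

SCORE_TASK_SCHEMA = vol.Schema(
    {
        vol.Required("config_entry"): str,
        vol.Required("task"): str,
        vol.Optional("direction"): str,
    }
)

# Valid call data passes through unchanged.
SCORE_TASK_SCHEMA({"config_entry": "abc123", "task": "Feed the dragon"})

# Missing required keys raise vol.MultipleInvalid.
try:
    SCORE_TASK_SCHEMA({"task": "Feed the dragon"})
except vol.MultipleInvalid as err:
    print(f"invalid service data: {err}")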
@@ -93,23 +70,10 @@ def get_config_entry(hass: HomeAssistant, entry_id: str) -> HabiticaConfigEntry:
return entry


def async_setup_services(hass: HomeAssistant) -> None: # noqa: C901
def async_setup_services(hass: HomeAssistant) -> None:
"""Set up services for Habitica integration."""

async def handle_api_call(call: ServiceCall) -> None:
async_create_issue(
hass,
DOMAIN,
"deprecated_api_call",
breaks_in_ha_version="2025.6.0",
is_fixable=False,
severity=IssueSeverity.WARNING,
translation_key="deprecated_api_call",
)
_LOGGER.warning(
"Deprecated action called: 'habitica.api_call' is deprecated and will be removed in Home Assistant version 2025.6.0"
)

name = call.data[ATTR_NAME]
path = call.data[ATTR_PATH]
entries = hass.config_entries.async_entries(DOMAIN)
@@ -196,104 +160,6 @@ def async_setup_services(hass: HomeAssistant) -> None: # noqa: C901
await coordinator.async_request_refresh()
return response

async def manage_quests(call: ServiceCall) -> ServiceResponse:
"""Accept, reject, start, leave or cancel quests."""
entry = get_config_entry(hass, call.data[ATTR_CONFIG_ENTRY])
coordinator = entry.runtime_data

COMMAND_MAP = {
SERVICE_ABORT_QUEST: "abort",
SERVICE_ACCEPT_QUEST: "accept",
SERVICE_CANCEL_QUEST: "cancel",
SERVICE_LEAVE_QUEST: "leave",
SERVICE_REJECT_QUEST: "reject",
SERVICE_START_QUEST: "force-start",
}
try:
return await coordinator.api.groups.party.quests[
COMMAND_MAP[call.service]
].post()
except ClientResponseError as e:
if e.status == HTTPStatus.TOO_MANY_REQUESTS:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="setup_rate_limit_exception",
) from e
if e.status == HTTPStatus.UNAUTHORIZED:
raise ServiceValidationError(
translation_domain=DOMAIN, translation_key="quest_action_unallowed"
) from e
if e.status == HTTPStatus.NOT_FOUND:
raise ServiceValidationError(
translation_domain=DOMAIN, translation_key="quest_not_found"
) from e
raise HomeAssistantError(
translation_domain=DOMAIN, translation_key="service_call_exception"
) from e

for service in (
SERVICE_ABORT_QUEST,
SERVICE_ACCEPT_QUEST,
SERVICE_CANCEL_QUEST,
SERVICE_LEAVE_QUEST,
SERVICE_REJECT_QUEST,
SERVICE_START_QUEST,
):
hass.services.async_register(
DOMAIN,
service,
manage_quests,
schema=SERVICE_MANAGE_QUEST_SCHEMA,
supports_response=SupportsResponse.ONLY,
)

async def score_task(call: ServiceCall) -> ServiceResponse:
"""Score a task action."""
entry = get_config_entry(hass, call.data[ATTR_CONFIG_ENTRY])
coordinator = entry.runtime_data
try:
task_id, task_value = next(
(task["id"], task.get("value"))
for task in coordinator.data.tasks
if call.data[ATTR_TASK] in (task["id"], task.get("alias"))
or call.data[ATTR_TASK] == task["text"]
)
except StopIteration as e:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="task_not_found",
translation_placeholders={"task": f"'{call.data[ATTR_TASK]}'"},
) from e

try:
response: dict[str, Any] = (
await coordinator.api.tasks[task_id]
.score[call.data.get(ATTR_DIRECTION, "up")]
.post()
)
except ClientResponseError as e:
if e.status == HTTPStatus.TOO_MANY_REQUESTS:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="setup_rate_limit_exception",
) from e
if e.status == HTTPStatus.UNAUTHORIZED and task_value is not None:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="not_enough_gold",
translation_placeholders={
"gold": f"{coordinator.data.user["stats"]["gp"]:.2f} GP",
"cost": f"{task_value} GP",
},
) from e
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="service_call_exception",
) from e
else:
await coordinator.async_request_refresh()
return response

hass.services.async_register(
DOMAIN,
SERVICE_API_CALL,
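Because all six quest services share one handler that maps call.service through COMMAND_MAP, invoking any of them looks the same from the caller's side. A hedged usage sketch (the entry id is a placeholder supplied by the caller):

from homeassistant.core import HomeAssistant

async def accept_party_quest(hass: HomeAssistant, entry_id: str) -> dict | None:
    """Call habitica.accept_quest and return the service response."""
    # return_response is needed because the services above are registered
    # with SupportsResponse.ONLY.
    return await hass.services.async_call(
        "habitica",
        "accept_quest",
        {"config_entry": entry_id},
        blocking=True,
        return_response=True,
    )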
@@ -308,18 +174,3 @@ def async_setup_services(hass: HomeAssistant) -> None: # noqa: C901
schema=SERVICE_CAST_SKILL_SCHEMA,
supports_response=SupportsResponse.ONLY,
)

hass.services.async_register(
DOMAIN,
SERVICE_SCORE_HABIT,
score_task,
schema=SERVICE_SCORE_TASK_SCHEMA,
supports_response=SupportsResponse.ONLY,
)
hass.services.async_register(
DOMAIN,
SERVICE_SCORE_REWARD,
score_task,
schema=SERVICE_SCORE_TASK_SCHEMA,
supports_response=SupportsResponse.ONLY,
)
Some files were not shown because too many files have changed in this diff.