Compare commits: dev...frenck-202 (2 commits)

Commits compared: ed4aa93ee8 and 284fc94d31

15134 changed files with 333116 additions and 956038 deletions
@@ -14,7 +14,6 @@ core: &core

base_platforms: &base_platforms
- homeassistant/components/air_quality/**
- homeassistant/components/alarm_control_panel/**
- homeassistant/components/assist_satellite/**
- homeassistant/components/binary_sensor/**
- homeassistant/components/button/**
- homeassistant/components/calendar/**

@@ -50,7 +49,6 @@ base_platforms: &base_platforms
- homeassistant/components/tts/**
- homeassistant/components/update/**
- homeassistant/components/vacuum/**
- homeassistant/components/valve/**
- homeassistant/components/water_heater/**
- homeassistant/components/weather/**

@@ -62,7 +60,6 @@ components: &components
- homeassistant/components/auth/**
- homeassistant/components/automation/**
- homeassistant/components/backup/**
- homeassistant/components/blueprint/**
- homeassistant/components/bluetooth/**
- homeassistant/components/cloud/**
- homeassistant/components/config/**

@@ -79,7 +76,6 @@ components: &components
- homeassistant/components/group/**
- homeassistant/components/hassio/**
- homeassistant/components/homeassistant/**
- homeassistant/components/homeassistant_hardware/**
- homeassistant/components/http/**
- homeassistant/components/image/**
- homeassistant/components/input_boolean/**

@@ -112,7 +108,6 @@ components: &components
- homeassistant/components/tag/**
- homeassistant/components/template/**
- homeassistant/components/timer/**
- homeassistant/components/trace/**
- homeassistant/components/usb/**
- homeassistant/components/webhook/**
- homeassistant/components/websocket_api/**

@@ -125,21 +120,21 @@ tests: &tests
- pylint/**
- requirements_test_pre_commit.txt
- requirements_test.txt
- tests/*.py
- tests/auth/**
- tests/backports/**
- tests/components/conftest.py
- tests/components/diagnostics/**
- tests/common.py
- tests/components/history/**
- tests/components/logbook/**
- tests/components/recorder/**
- tests/components/repairs/**
- tests/components/sensor/**
- tests/conftest.py
- tests/hassfest/**
- tests/helpers/**
- tests/ignore_uncaught_exceptions.py
- tests/mock/**
- tests/pylint/**
- tests/scripts/**
- tests/syrupy.py
- tests/test_util/**
- tests/testing_config/**
- tests/util/**

@@ -153,7 +148,6 @@ requirements: &requirements
- homeassistant/package_constraints.txt
- requirements*.txt
- pyproject.toml
- script/licenses.py
any:
- *base_platforms
.coveragerc (1715 changed lines, Normal file)
File diff suppressed because it is too large.
@@ -2,22 +2,12 @@
"name": "Home Assistant Dev",
"context": "..",
"dockerFile": "../Dockerfile.dev",
"postCreateCommand": "git config --global --add safe.directory ${containerWorkspaceFolder} && script/setup",
"postCreateCommand": "script/setup",
"postStartCommand": "script/bootstrap",
"containerEnv": {
"PYTHONASYNCIODEBUG": "1"
},
"features": {
"ghcr.io/devcontainers/features/github-cli:1": {}
},
"containerEnv": { "DEVCONTAINER": "1" },
// Port 5683 udp is used by Shelly integration
"appPort": ["8123:8123", "5683:5683/udp"],
"runArgs": [
"-e",
"GIT_EDITOR=code --wait",
"--security-opt",
"label=disable"
],
"runArgs": ["-e", "GIT_EDITOR=code --wait"],
"customizations": {
"vscode": {
"extensions": [

@@ -27,17 +17,12 @@
"visualstudioexptteam.vscodeintellicode",
"redhat.vscode-yaml",
"esbenp.prettier-vscode",
"GitHub.vscode-pull-request-github",
"GitHub.copilot"
"GitHub.vscode-pull-request-github"
],
// Please keep this file in sync with settings in home-assistant/.vscode/settings.default.json
"settings": {
"python.experiments.optOutFrom": ["pythonTestAdapter"],
"python.defaultInterpreterPath": "/home/vscode/.local/ha-venv/bin/python",
"python.pythonPath": "/home/vscode/.local/ha-venv/bin/python",
"python.terminal.activateEnvInCurrentTerminal": true,
"python.pythonPath": "/usr/local/bin/python",
"python.testing.pytestArgs": ["--no-cov"],
"pylint.importStrategy": "fromEnvironment",
"editor.formatOnPaste": false,
"editor.formatOnSave": true,
"editor.formatOnType": true,

@@ -58,13 +43,7 @@
],
"[python]": {
"editor.defaultFormatter": "charliermarsh.ruff"
},
"json.schemas": [
{
"fileMatch": ["homeassistant/components/*/manifest.json"],
"url": "./script/json_schemas/manifest_schema.json"
}
]
}
}
}
}
@@ -7,7 +7,6 @@ docs
# Development
.devcontainer
.vscode
.tool-versions

# Test related files
tests
@@ -1,14 +0,0 @@
# Black
4de97abc3aa83188666336ce0a015a5bab75bc8f

# Switch formatting from black to ruff-format (#102893)
706add4a57120a93d7b7fe40e722b00d634c76c2

# Prettify json (component test fixtures) (#68892)
053c4428a933c3c04c22642f93c93fccba3e8bfd

# Prettify json (tests) (#68888)
496d90bf00429d9d924caeb0155edc0bf54e86b9

# Bump ruff to 0.3.4 (#112690)
6bb4e7d62c60389608acf4a7d7dacd8f029307dd
.github/FUNDING.yml (3 changes, vendored)

@@ -1 +1,2 @@
custom: https://www.openhomefoundation.org
custom: https://www.nabucasa.com
github: balloob
.github/PULL_REQUEST_TEMPLATE.md (1 change, vendored)

@@ -74,6 +74,7 @@ If the code communicates with devices, web services, or third-party tools:
- [ ] New or updated dependencies have been added to `requirements_all.txt`.
      Updated by running `python3 -m script.gen_requirements_all`.
- [ ] For the updated dependencies - a link to the changelog, or at minimum a diff between library versions is added to the PR description.
- [ ] Untested files have been added to `.coveragerc`.

<!--
This project is very active and we have a high turnover of pull requests.
.github/workflows/builder.yml (291 changes, vendored)

@@ -10,10 +10,8 @@ on:
env:
BUILD_TYPE: core
DEFAULT_PYTHON: "3.13"
DEFAULT_PYTHON: "3.12"
PIP_TIMEOUT: 60
UV_HTTP_TIMEOUT: 60
UV_SYSTEM_PYTHON: "true"
jobs:
init:

@@ -27,12 +25,12 @@ jobs:
publish: ${{ steps.version.outputs.publish }}
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.2
uses: actions/checkout@v4.1.1
with:
fetch-depth: 0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.3.0
uses: actions/setup-python@v5.0.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}

@@ -51,29 +49,41 @@ jobs:
with:
ignore-dev: true
- name: Fail if translations files are checked in
run: |
if [ -n "$(find homeassistant/components/*/translations -type f)" ]; then
echo "Translations files are checked in, please remove the following files:"
find homeassistant/components/*/translations -type f
exit 1
fi
build_python:
name: Build PyPi package
environment: ${{ needs.init.outputs.channel }}
needs: ["init", "build_base"]
runs-on: ubuntu-latest
if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.1
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.0.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Download Translations
run: python3 -m script.translations download
env:
LOKALISE_TOKEN: ${{ secrets.LOKALISE_TOKEN }}
- name: Archive translations
- name: Build package
shell: bash
run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T -
run: |
# Remove dist, build, and homeassistant.egg-info
# when build locally for testing!
pip install twine build
python -m build
- name: Upload translations
uses: actions/upload-artifact@v4.4.3
with:
name: translations
path: translations.tar.gz
if-no-files-found: error
- name: Upload package
shell: bash
run: |
export TWINE_USERNAME="__token__"
export TWINE_PASSWORD="${{ secrets.TWINE_TOKEN }}"
twine upload dist/* --skip-existing
build_base:
name: Build ${{ matrix.arch }} base core image

@@ -85,16 +95,15 @@ jobs:
packages: write
id-token: write
strategy:
fail-fast: false
matrix:
arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.2
uses: actions/checkout@v4.1.1
- name: Download nightly wheels of frontend
if: needs.init.outputs.channel == 'dev'
uses: dawidd6/action-download-artifact@v6
uses: dawidd6/action-download-artifact@v3.0.0
with:
github_token: ${{secrets.GITHUB_TOKEN}}
repo: home-assistant/frontend

@@ -105,7 +114,7 @@ jobs:
- name: Download nightly wheels of intents
if: needs.init.outputs.channel == 'dev'
uses: dawidd6/action-download-artifact@v6
uses: dawidd6/action-download-artifact@v3.0.0
with:
github_token: ${{secrets.GITHUB_TOKEN}}
repo: home-assistant/intents-package

@@ -116,20 +125,17 @@ jobs:
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
if: needs.init.outputs.channel == 'dev'
uses: actions/setup-python@v5.3.0
uses: actions/setup-python@v5.0.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Adjust nightly version
if: needs.init.outputs.channel == 'dev'
shell: bash
env:
UV_PRERELEASE: allow
run: |
python3 -m pip install "$(grep '^uv' < requirements.txt)"
uv pip install packaging tomli
uv pip install .
python3 script/version_bump.py nightly --set-nightly-version "${{ needs.init.outputs.version }}"
python3 -m pip install packaging tomli
python3 -m pip install .
version="$(python3 script/version_bump.py nightly)"
if [[ "$(ls home_assistant_frontend*.whl)" =~ ^home_assistant_frontend-(.*)-py3-none-any.whl$ ]]; then
echo "Found frontend wheel, setting version to: ${BASH_REMATCH[1]}"

@@ -141,7 +147,7 @@ jobs:
sed -i "s|home-assistant-frontend==.*|home-assistant-frontend==${BASH_REMATCH[1]}|" \
homeassistant/package_constraints.txt
sed -i "s|home-assistant-frontend==.*||" requirements_all.txt
python -m script.gen_requirements_all
fi
if [[ "$(ls home_assistant_intents*.whl)" =~ ^home_assistant_intents-(.*)-py3-none-any.whl$ ]]; then

@@ -159,7 +165,7 @@ jobs:
sed -i "s|home-assistant-intents==.*|home-assistant-intents==${BASH_REMATCH[1]}|" \
homeassistant/package_constraints.txt
sed -i "s|home-assistant-intents==.*||" requirements_all.txt
python -m script.gen_requirements_all
fi
- name: Adjustments for armhf

@@ -174,15 +180,19 @@ jobs:
sed -i "s|pyezviz|# pyezviz|g" requirements_all.txt
sed -i "s|pykrakenapi|# pykrakenapi|g" requirements_all.txt
- name: Download translations
uses: actions/download-artifact@v4.1.8
with:
name: translations
- name: Extract translations
- name: Adjustments for 64-bit
if: matrix.arch == 'amd64' || matrix.arch == 'aarch64'
run: |
tar xvf translations.tar.gz
rm translations.tar.gz
# Some speedups are only available on 64-bit, and since
# we build 32bit images on 64bit hosts, we only enable
# the speed ups on 64bit since the wheels for 32bit
# are not available.
sed -i "s|aiohttp-zlib-ng|aiohttp-zlib-ng\[isal\]|g" requirements_all.txt
- name: Download Translations
run: python3 -m script.translations download
env:
LOKALISE_TOKEN: ${{ secrets.LOKALISE_TOKEN }}
- name: Write meta info file
shell: bash

@@ -190,14 +200,14 @@ jobs:
echo "${{ github.sha }};${{ github.ref }};${{ github.event_name }};${{ github.actor }}" > rootfs/OFFICIAL_IMAGE
- name: Login to GitHub Container Registry
uses: docker/login-action@v3.3.0
uses: docker/login-action@v3.0.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build base image
uses: home-assistant/builder@2024.08.2
uses: home-assistant/builder@2024.01.0
with:
args: |
$BUILD_ARGS \

@@ -206,6 +216,17 @@ jobs:
--target /data \
--generic ${{ needs.init.outputs.version }}
- name: Archive translations
shell: bash
run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T -
- name: Upload translations
uses: actions/upload-artifact@v3
with:
name: translations
path: translations.tar.gz
if-no-files-found: error
build_machine:
name: Build ${{ matrix.machine }} machine core image
if: github.repository_owner == 'home-assistant'

@@ -242,7 +263,7 @@ jobs:
- green
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.2
uses: actions/checkout@v4.1.1
- name: Set build additional args
run: |

@@ -256,14 +277,14 @@ jobs:
fi
- name: Login to GitHub Container Registry
uses: docker/login-action@v3.3.0
uses: docker/login-action@v3.0.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build base image
uses: home-assistant/builder@2024.08.2
uses: home-assistant/builder@2024.01.0
with:
args: |
$BUILD_ARGS \

@@ -279,7 +300,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.2
uses: actions/checkout@v4.1.1
- name: Initialize git
uses: home-assistant/actions/helpers/git-init@master

@@ -315,29 +336,23 @@ jobs:
contents: read
packages: write
id-token: write
strategy:
fail-fast: false
matrix:
registry: ["ghcr.io/home-assistant", "docker.io/homeassistant"]
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.2
uses: actions/checkout@v4.1.1
- name: Install Cosign
uses: sigstore/cosign-installer@v3.7.0
uses: sigstore/cosign-installer@v3.4.0
with:
cosign-release: "v2.2.3"
cosign-release: "v2.0.2"
- name: Login to DockerHub
if: matrix.registry == 'docker.io/homeassistant'
uses: docker/login-action@v3.3.0
uses: docker/login-action@v3.0.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to GitHub Container Registry
if: matrix.registry == 'ghcr.io/home-assistant'
uses: docker/login-action@v3.3.0
uses: docker/login-action@v3.0.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}

@@ -351,37 +366,41 @@ jobs:
function create_manifest() {
local tag_l=${1}
local tag_r=${2}
local registry=${{ matrix.registry }}
docker manifest create "${registry}/home-assistant:${tag_l}" \
"${registry}/amd64-homeassistant:${tag_r}" \
"${registry}/i386-homeassistant:${tag_r}" \
"${registry}/armhf-homeassistant:${tag_r}" \
"${registry}/armv7-homeassistant:${tag_r}" \
"${registry}/aarch64-homeassistant:${tag_r}"
for registry in "ghcr.io/home-assistant" "docker.io/homeassistant"
do
docker manifest annotate "${registry}/home-assistant:${tag_l}" \
"${registry}/amd64-homeassistant:${tag_r}" \
--os linux --arch amd64
docker manifest create "${registry}/home-assistant:${tag_l}" \
"${registry}/amd64-homeassistant:${tag_r}" \
"${registry}/i386-homeassistant:${tag_r}" \
"${registry}/armhf-homeassistant:${tag_r}" \
"${registry}/armv7-homeassistant:${tag_r}" \
"${registry}/aarch64-homeassistant:${tag_r}"
docker manifest annotate "${registry}/home-assistant:${tag_l}" \
"${registry}/i386-homeassistant:${tag_r}" \
--os linux --arch 386
docker manifest annotate "${registry}/home-assistant:${tag_l}" \
"${registry}/amd64-homeassistant:${tag_r}" \
--os linux --arch amd64
docker manifest annotate "${registry}/home-assistant:${tag_l}" \
"${registry}/armhf-homeassistant:${tag_r}" \
--os linux --arch arm --variant=v6
docker manifest annotate "${registry}/home-assistant:${tag_l}" \
"${registry}/i386-homeassistant:${tag_r}" \
--os linux --arch 386
docker manifest annotate "${registry}/home-assistant:${tag_l}" \
"${registry}/armv7-homeassistant:${tag_r}" \
--os linux --arch arm --variant=v7
docker manifest annotate "${registry}/home-assistant:${tag_l}" \
"${registry}/armhf-homeassistant:${tag_r}" \
--os linux --arch arm --variant=v6
docker manifest annotate "${registry}/home-assistant:${tag_l}" \
"${registry}/aarch64-homeassistant:${tag_r}" \
--os linux --arch arm64 --variant=v8
docker manifest annotate "${registry}/home-assistant:${tag_l}" \
"${registry}/armv7-homeassistant:${tag_r}" \
--os linux --arch arm --variant=v7
docker manifest push --purge "${registry}/home-assistant:${tag_l}"
cosign sign --yes "${registry}/home-assistant:${tag_l}"
docker manifest annotate "${registry}/home-assistant:${tag_l}" \
"${registry}/aarch64-homeassistant:${tag_r}" \
--os linux --arch arm64 --variant=v8
docker manifest push --purge "${registry}/home-assistant:${tag_l}"
cosign sign --yes "${registry}/home-assistant:${tag_l}"
done
}
function validate_image() {

@@ -414,14 +433,12 @@ jobs:
validate_image "ghcr.io/home-assistant/armv7-homeassistant:${{ needs.init.outputs.version }}"
validate_image "ghcr.io/home-assistant/aarch64-homeassistant:${{ needs.init.outputs.version }}"
if [[ "${{ matrix.registry }}" == "docker.io/homeassistant" ]]; then
# Upload images to dockerhub
push_dockerhub "amd64-homeassistant" "${{ needs.init.outputs.version }}"
push_dockerhub "i386-homeassistant" "${{ needs.init.outputs.version }}"
push_dockerhub "armhf-homeassistant" "${{ needs.init.outputs.version }}"
push_dockerhub "armv7-homeassistant" "${{ needs.init.outputs.version }}"
push_dockerhub "aarch64-homeassistant" "${{ needs.init.outputs.version }}"
fi
# Upload images to dockerhub
push_dockerhub "amd64-homeassistant" "${{ needs.init.outputs.version }}"
push_dockerhub "i386-homeassistant" "${{ needs.init.outputs.version }}"
push_dockerhub "armhf-homeassistant" "${{ needs.init.outputs.version }}"
push_dockerhub "armv7-homeassistant" "${{ needs.init.outputs.version }}"
push_dockerhub "aarch64-homeassistant" "${{ needs.init.outputs.version }}"
# Create version tag
create_manifest "${{ needs.init.outputs.version }}" "${{ needs.init.outputs.version }}"

@@ -442,97 +459,3 @@ jobs:
v="${{ needs.init.outputs.version }}"
create_manifest "${v%.*}" "${{ needs.init.outputs.version }}"
fi
build_python:
name: Build PyPi package
environment: ${{ needs.init.outputs.channel }}
needs: ["init", "build_base"]
runs-on: ubuntu-latest
if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.3.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Download translations
uses: actions/download-artifact@v4.1.8
with:
name: translations
- name: Extract translations
run: |
tar xvf translations.tar.gz
rm translations.tar.gz
- name: Build package
shell: bash
run: |
# Remove dist, build, and homeassistant.egg-info
# when build locally for testing!
pip install twine build
python -m build
- name: Upload package
shell: bash
run: |
export TWINE_USERNAME="__token__"
export TWINE_PASSWORD="${{ secrets.TWINE_TOKEN }}"
twine upload dist/* --skip-existing
hassfest-image:
name: Build and test hassfest image
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
attestations: write
id-token: write
needs: ["init"]
if: github.repository_owner == 'home-assistant'
env:
HASSFEST_IMAGE_NAME: ghcr.io/home-assistant/hassfest
HASSFEST_IMAGE_TAG: ghcr.io/home-assistant/hassfest:${{ needs.init.outputs.version }}
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Login to GitHub Container Registry
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker image
uses: docker/build-push-action@4f58ea79222b3b9dc2c8bbdd6debcef730109a75 # v6.9.0
with:
context: . # So action will not pull the repository again
file: ./script/hassfest/docker/Dockerfile
load: true
tags: ${{ env.HASSFEST_IMAGE_TAG }}
- name: Run hassfest against core
run: docker run --rm -v ${{ github.workspace }}/homeassistant:/github/workspace/homeassistant ${{ env.HASSFEST_IMAGE_TAG }} --core-integrations-path=/github/workspace/homeassistant/components
- name: Push Docker image
if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
id: push
uses: docker/build-push-action@4f58ea79222b3b9dc2c8bbdd6debcef730109a75 # v6.9.0
with:
context: . # So action will not pull the repository again
file: ./script/hassfest/docker/Dockerfile
push: true
tags: ${{ env.HASSFEST_IMAGE_TAG }},${{ env.HASSFEST_IMAGE_NAME }}:latest
- name: Generate artifact attestation
if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
uses: actions/attest-build-provenance@ef244123eb79f2f7a7e75d99086184180e6d0018 # v1.4.4
with:
subject-name: ${{ env.HASSFEST_IMAGE_NAME }}
subject-digest: ${{ steps.push.outputs.digest }}
push-to-registry: true
.github/workflows/ci.yaml (682 changes, vendored)
File diff suppressed because it is too large.
.github/workflows/codeql.yml (11 changes, vendored)

@@ -2,6 +2,11 @@ name: "CodeQL"
# yamllint disable-line rule:truthy
on:
push:
branches:
- dev
- rc
- master
schedule:
- cron: "30 18 * * 4"

@@ -21,14 +26,14 @@ jobs:
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v4.1.1
- name: Initialize CodeQL
uses: github/codeql-action/init@v3.27.3
uses: github/codeql-action/init@v3.23.2
with:
languages: python
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3.27.3
uses: github/codeql-action/analyze@v3.23.2
with:
category: "/language:python"
.github/workflows/translations.yml (6 changes, vendored)

@@ -10,7 +10,7 @@ on:
- "**strings.json"
env:
DEFAULT_PYTHON: "3.12"
DEFAULT_PYTHON: "3.11"
jobs:
upload:

@@ -19,10 +19,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.2
uses: actions/checkout@v4.1.1
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.3.0
uses: actions/setup-python@v5.0.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
.github/workflows/wheels.yml (170 changes, vendored)

@@ -14,10 +14,6 @@ on:
- "homeassistant/package_constraints.txt"
- "requirements_all.txt"
- "requirements.txt"
- "script/gen_requirements_all.py"
env:
DEFAULT_PYTHON: "3.12"
concurrency:
group: ${{ github.workflow }}-${{ github.ref_name}}

@@ -32,22 +28,7 @@ jobs:
architectures: ${{ steps.info.outputs.architectures }}
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.3.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
- name: Create Python virtual environment
run: |
python -m venv venv
. venv/bin/activate
python --version
pip install "$(grep '^uv' < requirements.txt)"
uv pip install -r requirements.txt
uses: actions/checkout@v4.1.1
- name: Get information
id: info

@@ -64,8 +45,11 @@ jobs:
- name: Write env-file
run: |
(
echo "GRPC_BUILD_WITH_BORING_SSL_ASM=false"
echo "GRPC_PYTHON_BUILD_SYSTEM_OPENSSL=true"
echo "GRPC_PYTHON_BUILD_WITH_CYTHON=true"
echo "GRPC_PYTHON_DISABLE_LIBC_COMPATIBILITY=true"
echo "GRPC_PYTHON_LDFLAGS=-lpthread -Wl,-wrap,memcpy -static-libgcc"
# Fix out of memory issues with rust
echo "CARGO_NET_GIT_FETCH_WITH_CLI=true"

@@ -79,30 +63,16 @@ jobs:
) > .env_file
- name: Upload env_file
uses: actions/upload-artifact@v4.4.3
uses: actions/upload-artifact@v3.1.2
with:
name: env_file
path: ./.env_file
include-hidden-files: true
overwrite: true
- name: Upload requirements_diff
uses: actions/upload-artifact@v4.4.3
uses: actions/upload-artifact@v3.1.2
with:
name: requirements_diff
path: ./requirements_diff.txt
overwrite: true
- name: Generate requirements
run: |
. venv/bin/activate
python -m script.gen_requirements_all ci
- name: Upload requirements_all_wheels
uses: actions/upload-artifact@v4.4.3
with:
name: requirements_all_wheels
path: ./requirements_all_wheels_*.txt
core:
name: Build Core wheels ${{ matrix.abi }} for ${{ matrix.arch }} (musllinux_1_2)

@@ -112,38 +82,32 @@ jobs:
strategy:
fail-fast: false
matrix:
abi: ["cp312", "cp313"]
abi: ["cp311", "cp312"]
arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.2
uses: actions/checkout@v4.1.1
- name: Download env_file
uses: actions/download-artifact@v4.1.8
uses: actions/download-artifact@v3
with:
name: env_file
- name: Download requirements_diff
uses: actions/download-artifact@v4.1.8
uses: actions/download-artifact@v3
with:
name: requirements_diff
- name: Adjust build env
run: |
# Don't build wheels for uv as uv requires a greater version of rust as currently available on alpine
sed -i "/uv/d" requirements.txt
sed -i "/uv/d" requirements_diff.txt
- name: Build wheels
uses: home-assistant/wheels@2024.11.0
uses: home-assistant/wheels@2024.01.0
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2
arch: ${{ matrix.arch }}
wheels-key: ${{ secrets.WHEELS_KEY }}
env-file: true
apk: "libffi-dev;openssl-dev;yaml-dev;nasm;zlib-dev"
skip-binary: aiohttp;multidict;yarl
apk: "libffi-dev;openssl-dev;yaml-dev;nasm"
skip-binary: aiohttp
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"
requirements: "requirements.txt"

@@ -156,26 +120,73 @@ jobs:
strategy:
fail-fast: false
matrix:
abi: ["cp312", "cp313"]
abi: ["cp311", "cp312"]
arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.2
uses: actions/checkout@v4.1.1
- name: Download env_file
uses: actions/download-artifact@v4.1.8
uses: actions/download-artifact@v3
with:
name: env_file
- name: Download requirements_diff
uses: actions/download-artifact@v4.1.8
uses: actions/download-artifact@v3
with:
name: requirements_diff
- name: Download requirements_all_wheels
uses: actions/download-artifact@v4.1.8
with:
name: requirements_all_wheels
- name: (Un)comment packages
run: |
requirement_files="requirements_all.txt requirements_diff.txt"
for requirement_file in ${requirement_files}; do
sed -i "s|# fritzconnection|fritzconnection|g" ${requirement_file}
sed -i "s|# pyuserinput|pyuserinput|g" ${requirement_file}
sed -i "s|# evdev|evdev|g" ${requirement_file}
sed -i "s|# pycups|pycups|g" ${requirement_file}
sed -i "s|# homekit|homekit|g" ${requirement_file}
sed -i "s|# decora-wifi|decora-wifi|g" ${requirement_file}
sed -i "s|# python-gammu|python-gammu|g" ${requirement_file}
# Some packages are not buildable on armhf anymore
if [ "${{ matrix.arch }}" = "armhf" ]; then
# Pandas has issues building on armhf, it is expected they
# will drop the platform in the near future (they consider it
# "flimsy" on 386). The following packages depend on pandas,
# so we comment them out.
sed -i "s|env-canada|# env-canada|g" ${requirement_file}
sed -i "s|noaa-coops|# noaa-coops|g" ${requirement_file}
sed -i "s|pyezviz|# pyezviz|g" ${requirement_file}
sed -i "s|pykrakenapi|# pykrakenapi|g" ${requirement_file}
fi
# Some speedups are only for 64-bit
if [ "${{ matrix.arch }}" = "amd64" ] || [ "${{ matrix.arch }}" = "aarch64" ]; then
sed -i "s|aiohttp-zlib-ng|aiohttp-zlib-ng\[isal\]|g" ${requirement_file}
fi
done
- name: Split requirements all
run: |
# We split requirements all into two different files.
# This is to prevent the build from running out of memory when
# resolving packages on 32-bits systems (like armhf, armv7).
split -l $(expr $(expr $(cat requirements_all.txt | wc -l) + 1) / 3) requirements_all.txt requirements_all.txt
- name: Create requirements for cython<3
run: |
# Some dependencies still require 'cython<3'
# and don't yet use isolated build environments.
# Build these first.
# grpcio: https://github.com/grpc/grpc/issues/33918
# pydantic: https://github.com/pydantic/pydantic/issues/7689
touch requirements_old-cython.txt
cat homeassistant/package_constraints.txt | grep 'grpcio==' >> requirements_old-cython.txt
cat homeassistant/package_constraints.txt | grep 'pydantic==' >> requirements_old-cython.txt
- name: Adjust build env
run: |

@@ -185,32 +196,9 @@ jobs:
# Do not pin numpy in wheels building
sed -i "/numpy/d" homeassistant/package_constraints.txt
# Don't build wheels for uv as uv requires a greater version of rust as currently available on alpine
sed -i "/uv/d" requirements.txt
sed -i "/uv/d" requirements_diff.txt
- name: Split requirements all
run: |
# We split requirements all into multiple files.
# This is to prevent the build from running out of memory when
# resolving packages on 32-bits systems (like armhf, armv7).
split -l $(expr $(expr $(cat requirements_all.txt | wc -l) + 1) / 3) requirements_all_wheels_${{ matrix.arch }}.txt requirements_all.txt
- name: Create requirements for cython<3
if: matrix.abi == 'cp312'
run: |
# Some dependencies still require 'cython<3'
# and don't yet use isolated build environments.
# Build these first.
# pydantic: https://github.com/pydantic/pydantic/issues/7689
touch requirements_old-cython.txt
cat homeassistant/package_constraints.txt | grep 'pydantic==' >> requirements_old-cython.txt
- name: Build wheels (old cython)
uses: home-assistant/wheels@2024.11.0
if: matrix.abi == 'cp312'
uses: home-assistant/wheels@2024.01.0
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2

@@ -218,50 +206,50 @@ jobs:
wheels-key: ${{ secrets.WHEELS_KEY }}
env-file: true
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev"
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pydantic;pymicro-vad;yarl
skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"
requirements: "requirements_old-cython.txt"
pip: "'cython<3'"
- name: Build wheels (part 1)
uses: home-assistant/wheels@2024.11.0
uses: home-assistant/wheels@2024.01.0
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2
arch: ${{ matrix.arch }}
wheels-key: ${{ secrets.WHEELS_KEY }}
env-file: true
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev"
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm"
skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"
requirements: "requirements_all.txtaa"
- name: Build wheels (part 2)
uses: home-assistant/wheels@2024.11.0
uses: home-assistant/wheels@2024.01.0
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2
arch: ${{ matrix.arch }}
wheels-key: ${{ secrets.WHEELS_KEY }}
env-file: true
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev"
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm"
skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"
requirements: "requirements_all.txtab"
- name: Build wheels (part 3)
uses: home-assistant/wheels@2024.11.0
uses: home-assistant/wheels@2024.01.0
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2
arch: ${{ matrix.arch }}
wheels-key: ${{ secrets.WHEELS_KEY }}
env-file: true
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev"
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm"
skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"
requirements: "requirements_all.txtac"
.gitignore (5 changes, vendored)

@@ -34,7 +34,6 @@ Icon
# GITHUB Proposed Python stuff:
*.py[cod]
__pycache__
# C extensions
*.so

@@ -79,7 +78,6 @@ pytest-*.txt
.pydevproject
.python-version
.tool-versions
# emacs auto backups
*~

@@ -134,6 +132,3 @@ tmp_cache
# python-language-server / Rope
.ropeproject
# Will be created from script/split_tests.py
pytest_buckets.txt
@@ -1,21 +1,21 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.7.3
rev: v0.1.15
hooks:
- id: ruff
args:
- --fix
- id: ruff-format
files: ^((homeassistant|pylint|script|tests)/.+)?[^/]+\.(py|pyi)$
files: ^((homeassistant|pylint|script|tests)/.+)?[^/]+\.py$
- repo: https://github.com/codespell-project/codespell
rev: v2.3.0
rev: v2.2.2
hooks:
- id: codespell
args:
- --ignore-words-list=astroid,checkin,currenty,hass,iif,incomfort,lookin,nam,NotIn
- --ignore-words-list=additionals,alle,alot,bund,currenty,datas,farenheit,falsy,fo,haa,hass,iif,incomfort,ines,ist,nam,nd,pres,pullrequests,resset,rime,ser,serie,te,technik,ue,unsecure,withing,zar
- --skip="./.*,*.csv,*.json,*.ambr"
- --quiet-level=2
exclude_types: [csv, json, html]
exclude_types: [csv, json]
exclude: ^tests/fixtures/|homeassistant/generated/|tests/components/.*/snapshots/
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.4.0

@@ -30,7 +30,7 @@ repos:
- --branch=master
- --branch=rc
- repo: https://github.com/adrienverge/yamllint.git
rev: v1.35.1
rev: v1.32.0
hooks:
- id: yamllint
- repo: https://github.com/pre-commit/mirrors-prettier

@@ -61,15 +61,15 @@ repos:
name: mypy
entry: script/run-in-env.sh mypy
language: script
types_or: [python, pyi]
types: [python]
require_serial: true
files: ^(homeassistant|pylint)/.+\.(py|pyi)$
files: ^(homeassistant|pylint)/.+\.py$
- id: pylint
name: pylint
entry: script/run-in-env.sh pylint -j 0 --ignore-missing-annotations=y
language: script
types_or: [python, pyi]
files: ^(homeassistant|tests)/.+\.(py|pyi)$
types: [python]
files: ^homeassistant/.+\.py$
- id: gen_requirements_all
name: gen_requirements_all
entry: script/run-in-env.sh python3 -m script.gen_requirements_all

@@ -83,14 +83,14 @@ repos:
pass_filenames: false
language: script
types: [text]
files: ^(homeassistant/.+/(icons|manifest|strings)\.json|homeassistant/brands/.*\.json|homeassistant/.+/services\.yaml|script/hassfest/(?!metadata|mypy_config).+\.py|requirements.+\.txt)$
files: ^(homeassistant/.+/(icons|manifest|strings)\.json|homeassistant/brands/.*\.json|\.coveragerc|homeassistant/.+/services\.yaml|script/hassfest/(?!metadata|mypy_config).+\.py)$
- id: hassfest-metadata
name: hassfest-metadata
entry: script/run-in-env.sh python3 -m script.hassfest -p metadata,docker
entry: script/run-in-env.sh python3 -m script.hassfest -p metadata
pass_filenames: false
language: script
types: [text]
files: ^(script/hassfest/metadata\.py|homeassistant/const\.py$|pyproject\.toml|homeassistant/components/go2rtc/const\.py)$
files: ^(script/hassfest/metadata\.py|homeassistant/const\.py$|pyproject\.toml)$
- id: hassfest-mypy-config
name: hassfest-mypy-config
entry: script/run-in-env.sh python3 -m script.hassfest -p mypy_config
@@ -1,5 +1,6 @@
*.md
.strict-typing
azure-*.yml
homeassistant/components/*/translations/*.json
homeassistant/generated/*
tests/components/lidarr/fixtures/initialize.js
@@ -21,7 +21,6 @@ homeassistant.helpers.entity_platform
homeassistant.helpers.entity_values
homeassistant.helpers.event
homeassistant.helpers.reload
homeassistant.helpers.script
homeassistant.helpers.script_variables
homeassistant.helpers.singleton
homeassistant.helpers.sun

@@ -49,7 +48,6 @@ homeassistant.components.adax.*
homeassistant.components.adguard.*
homeassistant.components.aftership.*
homeassistant.components.air_quality.*
homeassistant.components.airgradient.*
homeassistant.components.airly.*
homeassistant.components.airnow.*
homeassistant.components.airq.*

@@ -67,7 +65,7 @@ homeassistant.components.alexa.*
homeassistant.components.alpha_vantage.*
homeassistant.components.amazon_polly.*
homeassistant.components.amberelectric.*
homeassistant.components.ambient_network.*
homeassistant.components.ambiclimate.*
homeassistant.components.ambient_station.*
homeassistant.components.amcrest.*
homeassistant.components.ampio.*

@@ -82,10 +80,8 @@ homeassistant.components.anthemav.*
homeassistant.components.apache_kafka.*
homeassistant.components.apcupsd.*
homeassistant.components.api.*
homeassistant.components.apple_tv.*
homeassistant.components.apprise.*
homeassistant.components.aprs.*
homeassistant.components.apsystems.*
homeassistant.components.aqualogic.*
homeassistant.components.aquostv.*
homeassistant.components.aranet.*

@@ -95,9 +91,9 @@ homeassistant.components.aruba.*
homeassistant.components.arwn.*
homeassistant.components.aseko_pool_live.*
homeassistant.components.assist_pipeline.*
homeassistant.components.assist_satellite.*
homeassistant.components.asterisk_cdr.*
homeassistant.components.asterisk_mbox.*
homeassistant.components.asuswrt.*
homeassistant.components.autarco.*
homeassistant.components.auth.*
homeassistant.components.automation.*
homeassistant.components.awair.*

@@ -111,7 +107,6 @@ homeassistant.components.bitcoin.*
homeassistant.components.blockchain.*
homeassistant.components.blue_current.*
homeassistant.components.blueprint.*
homeassistant.components.bluesound.*
homeassistant.components.bluetooth.*
homeassistant.components.bluetooth_adapters.*
homeassistant.components.bluetooth_tracker.*

@@ -120,11 +115,9 @@ homeassistant.components.bond.*
homeassistant.components.braviatv.*
homeassistant.components.brother.*
homeassistant.components.browser.*
homeassistant.components.bryant_evolution.*
homeassistant.components.bthome.*
homeassistant.components.button.*
homeassistant.components.calendar.*
homeassistant.components.cambridge_audio.*
homeassistant.components.camera.*
homeassistant.components.canary.*
homeassistant.components.cert_expiry.*

@@ -142,7 +135,6 @@ homeassistant.components.cpuspeed.*
homeassistant.components.crownstone.*
homeassistant.components.date.*
homeassistant.components.datetime.*
homeassistant.components.deako.*
homeassistant.components.deconz.*
homeassistant.components.default_config.*
homeassistant.components.demo.*

@@ -170,16 +162,13 @@ homeassistant.components.ecowitt.*
homeassistant.components.efergy.*
homeassistant.components.electrasmart.*
homeassistant.components.electric_kiwi.*
homeassistant.components.elevenlabs.*
homeassistant.components.elgato.*
homeassistant.components.elkm1.*
homeassistant.components.emulated_hue.*
homeassistant.components.energenie_power_sockets.*
homeassistant.components.energy.*
homeassistant.components.energyzero.*
homeassistant.components.enigma2.*
homeassistant.components.enphase_envoy.*
homeassistant.components.eq3btsmart.*
homeassistant.components.esphome.*
homeassistant.components.event.*
homeassistant.components.evil_genius_labs.*

@@ -200,23 +189,17 @@ homeassistant.components.fritzbox.*
homeassistant.components.fritzbox_callmonitor.*
homeassistant.components.fronius.*
homeassistant.components.frontend.*
homeassistant.components.fujitsu_fglair.*
homeassistant.components.fully_kiosk.*
homeassistant.components.fyta.*
homeassistant.components.generic_hygrostat.*
homeassistant.components.generic_thermostat.*
homeassistant.components.geo_location.*
homeassistant.components.geocaching.*
homeassistant.components.gios.*
homeassistant.components.glances.*
homeassistant.components.go2rtc.*
homeassistant.components.goalzero.*
homeassistant.components.google.*
homeassistant.components.google_assistant_sdk.*
homeassistant.components.google_cloud.*
homeassistant.components.google_photos.*
homeassistant.components.google_sheets.*
homeassistant.components.govee_ble.*
homeassistant.components.gpsd.*
homeassistant.components.greeneye_monitor.*
homeassistant.components.group.*

@@ -244,11 +227,9 @@ homeassistant.components.homekit_controller.select
homeassistant.components.homekit_controller.storage
homeassistant.components.homekit_controller.utils
homeassistant.components.homewizard.*
homeassistant.components.homeworks.*
homeassistant.components.http.*
homeassistant.components.huawei_lte.*
homeassistant.components.humidifier.*
homeassistant.components.husqvarna_automower.*
homeassistant.components.hydrawise.*
homeassistant.components.hyperion.*
homeassistant.components.ibeacon.*

@@ -257,7 +238,6 @@ homeassistant.components.image.*
homeassistant.components.image_processing.*
homeassistant.components.image_upload.*
homeassistant.components.imap.*
homeassistant.components.imgw_pib.*
homeassistant.components.input_button.*
homeassistant.components.input_select.*
homeassistant.components.input_text.*

@@ -265,7 +245,6 @@ homeassistant.components.integration.*
homeassistant.components.intent.*
homeassistant.components.intent_script.*
homeassistant.components.ios.*
homeassistant.components.iotty.*
homeassistant.components.ipp.*
homeassistant.components.iqvia.*
homeassistant.components.islamic_prayer_times.*

@@ -274,7 +253,6 @@ homeassistant.components.jellyfin.*
homeassistant.components.jewish_calendar.*
homeassistant.components.jvc_projector.*
homeassistant.components.kaleidescape.*
homeassistant.components.knocki.*
homeassistant.components.knx.*
homeassistant.components.kraken.*
homeassistant.components.lacrosse.*

@@ -286,12 +264,10 @@ homeassistant.components.lawn_mower.*
homeassistant.components.lcn.*
homeassistant.components.ld2410_ble.*
homeassistant.components.led_ble.*
homeassistant.components.lektrico.*
homeassistant.components.lidarr.*
homeassistant.components.lifx.*
homeassistant.components.light.*
homeassistant.components.linear_garage_door.*
homeassistant.components.linkplay.*
homeassistant.components.litejet.*
homeassistant.components.litterrobot.*
homeassistant.components.local_ip.*

@@ -302,8 +278,8 @@ homeassistant.components.logger.*
homeassistant.components.london_underground.*
homeassistant.components.lookin.*
homeassistant.components.luftdaten.*
homeassistant.components.madvr.*
homeassistant.components.manual.*
homeassistant.components.mailbox.*
homeassistant.components.map.*
homeassistant.components.mastodon.*
homeassistant.components.matrix.*
homeassistant.components.matter.*

@@ -318,19 +294,15 @@ homeassistant.components.minecraft_server.*
homeassistant.components.mjpeg.*
homeassistant.components.modbus.*
homeassistant.components.modem_callerid.*
homeassistant.components.mold_indicator.*
homeassistant.components.monzo.*
homeassistant.components.moon.*
homeassistant.components.mopeka.*
homeassistant.components.motionmount.*
homeassistant.components.mqtt.*
homeassistant.components.music_assistant.*
homeassistant.components.my.*
homeassistant.components.mysensors.*
homeassistant.components.myuplink.*
homeassistant.components.nam.*
homeassistant.components.nanoleaf.*
homeassistant.components.nasweb.*
homeassistant.components.neato.*
homeassistant.components.nest.*
homeassistant.components.netatmo.*

@@ -340,7 +312,6 @@ homeassistant.components.nfandroidtv.*
homeassistant.components.nightscout.*
homeassistant.components.nissan_leaf.*
homeassistant.components.no_ip.*
homeassistant.components.nordpool.*
homeassistant.components.notify.*
homeassistant.components.notion.*
homeassistant.components.number.*

@@ -348,9 +319,7 @@ homeassistant.components.nut.*
homeassistant.components.onboarding.*
homeassistant.components.oncue.*
homeassistant.components.onewire.*
homeassistant.components.onkyo.*
homeassistant.components.open_meteo.*
homeassistant.components.openai_conversation.*
homeassistant.components.openexchangerates.*
homeassistant.components.opensky.*
homeassistant.components.openuv.*

@@ -358,12 +327,12 @@ homeassistant.components.oralb.*
homeassistant.components.otbr.*
homeassistant.components.overkiz.*
homeassistant.components.p1_monitor.*
homeassistant.components.panel_custom.*
homeassistant.components.peco.*
homeassistant.components.persistent_notification.*
homeassistant.components.pi_hole.*
homeassistant.components.ping.*
homeassistant.components.plugwise.*
homeassistant.components.poolsense.*
homeassistant.components.powerwall.*
homeassistant.components.private_ble_device.*
homeassistant.components.prometheus.*

@@ -376,7 +345,6 @@ homeassistant.components.pvoutput.*
homeassistant.components.qnap_qsw.*
homeassistant.components.rabbitair.*
homeassistant.components.radarr.*
homeassistant.components.radio_browser.*
homeassistant.components.rainforest_raven.*
homeassistant.components.rainmachine.*
homeassistant.components.raspberry_pi.*

@@ -391,9 +359,7 @@ homeassistant.components.rest_command.*
homeassistant.components.rfxtrx.*
homeassistant.components.rhasspy.*
homeassistant.components.ridwell.*
homeassistant.components.ring.*
homeassistant.components.rituals_perfume_genie.*
homeassistant.components.roborock.*
homeassistant.components.roku.*
homeassistant.components.romy.*
homeassistant.components.rpi_power.*

@@ -405,16 +371,13 @@ homeassistant.components.samsungtv.*
homeassistant.components.scene.*
homeassistant.components.schedule.*
homeassistant.components.scrape.*
homeassistant.components.script.*
homeassistant.components.search.*
homeassistant.components.select.*
homeassistant.components.sensibo.*
homeassistant.components.sensirion_ble.*
homeassistant.components.sensor.*
homeassistant.components.sensoterra.*
homeassistant.components.senz.*
homeassistant.components.sfr_box.*
homeassistant.components.shell_command.*
homeassistant.components.shelly.*
homeassistant.components.shopping_list.*
homeassistant.components.simplepush.*

@@ -424,14 +387,10 @@ homeassistant.components.skybell.*
homeassistant.components.slack.*
homeassistant.components.sleepiq.*
homeassistant.components.smhi.*
homeassistant.components.smlight.*
homeassistant.components.snooz.*
homeassistant.components.solarlog.*
homeassistant.components.sonarr.*
homeassistant.components.speedtestdotnet.*
homeassistant.components.spotify.*
homeassistant.components.sql.*
homeassistant.components.squeezebox.*
homeassistant.components.ssdp.*
homeassistant.components.starlink.*
homeassistant.components.statistics.*

@@ -444,7 +403,6 @@ homeassistant.components.suez_water.*
homeassistant.components.sun.*
homeassistant.components.surepetcare.*
homeassistant.components.switch.*
homeassistant.components.switch_as_x.*
homeassistant.components.switchbee.*
homeassistant.components.switchbot_cloud.*
homeassistant.components.switcher_kis.*

@@ -461,7 +419,6 @@ homeassistant.components.tcp.*
homeassistant.components.technove.*
homeassistant.components.tedee.*
homeassistant.components.text.*
homeassistant.components.thethingsnetwork.*
homeassistant.components.threshold.*
homeassistant.components.tibber.*
homeassistant.components.tile.*

@@ -492,7 +449,6 @@ homeassistant.components.update.*
homeassistant.components.uptime.*
homeassistant.components.uptimerobot.*
homeassistant.components.usb.*
homeassistant.components.uvc.*
homeassistant.components.vacuum.*
homeassistant.components.vallox.*
homeassistant.components.valve.*

@@ -513,7 +469,6 @@ homeassistant.components.whois.*
homeassistant.components.withings.*
homeassistant.components.wiz.*
homeassistant.components.wled.*
homeassistant.components.workday.*
homeassistant.components.worldclock.*
homeassistant.components.xiaomi_ble.*
homeassistant.components.yale_smart_alarm.*
.vscode/launch.json (vendored, 42 lines changed)
@@ -6,52 +6,38 @@
"configurations": [
{
"name": "Home Assistant",
"type": "debugpy",
"type": "python",
"request": "launch",
"module": "homeassistant",
"justMyCode": false,
"args": [
"--debug",
"-c",
"config"
],
"args": ["--debug", "-c", "config"],
"preLaunchTask": "Compile English translations"
},
{
"name": "Home Assistant (skip pip)",
"type": "debugpy",
"type": "python",
"request": "launch",
"module": "homeassistant",
"justMyCode": false,
"args": [
"--debug",
"-c",
"config",
"--skip-pip"
],
"args": ["--debug", "-c", "config", "--skip-pip"],
"preLaunchTask": "Compile English translations"
},
{
"name": "Home Assistant: Changed tests",
"type": "debugpy",
"type": "python",
"request": "launch",
"module": "pytest",
"justMyCode": false,
"args": [
"--timeout=10",
"--picked"
],
"args": ["--timeout=10", "--picked"],
},
{
// Debug by attaching to local Home Assistant server using Remote Python Debugger.
// See https://www.home-assistant.io/integrations/debugpy/
"name": "Home Assistant: Attach Local",
"type": "debugpy",
"type": "python",
"request": "attach",
"connect": {
"port": 5678,
"host": "localhost"
},
"port": 5678,
"host": "localhost",
"pathMappings": [
{
"localRoot": "${workspaceFolder}",
@@ -63,12 +49,10 @@
// Debug by attaching to remote Home Assistant server using Remote Python Debugger.
// See https://www.home-assistant.io/integrations/debugpy/
"name": "Home Assistant: Attach Remote",
"type": "debugpy",
"type": "python",
"request": "attach",
"connect": {
"port": 5678,
"host": "homeassistant.local"
},
"port": 5678,
"host": "homeassistant.local",
"pathMappings": [
{
"localRoot": "${workspaceFolder}",
@@ -77,4 +61,4 @@
]
}
]
}
}
.vscode/settings.default.json (vendored, 12 lines changed)
@@ -4,15 +4,5 @@
// https://github.com/microsoft/vscode-python/issues/14067
"python.testing.pytestArgs": ["--no-cov"],
// https://code.visualstudio.com/docs/python/testing#_pytest-configuration-settings
"python.testing.pytestEnabled": false,
// https://code.visualstudio.com/docs/python/linting#_general-settings
"pylint.importStrategy": "fromEnvironment",
"json.schemas": [
{
"fileMatch": [
"homeassistant/components/*/manifest.json"
],
"url": "./script/json_schemas/manifest_schema.json"
}
]
"python.testing.pytestEnabled": false
}
.vscode/tasks.json (vendored, 5 lines changed)
@@ -76,7 +76,6 @@
"detail": "Generate code coverage report for a given integration.",
"type": "shell",
"command": "python3 -m pytest ./tests/components/${input:integrationName}/ --cov=homeassistant.components.${input:integrationName} --cov-report term-missing --durations-min=1 --durations=0 --numprocesses=auto",
"dependsOn": ["Compile English translations"],
"group": {
"kind": "test",
"isDefault": true
@@ -104,7 +103,7 @@
{
"label": "Install all Requirements",
"type": "shell",
"command": "uv pip install -r requirements_all.txt",
"command": "pip3 install -r requirements_all.txt",
"group": {
"kind": "build",
"isDefault": true
@@ -118,7 +117,7 @@
{
"label": "Install all Test Requirements",
"type": "shell",
"command": "uv pip install -r requirements_test_all.txt",
"command": "pip3 install -r requirements_test_all.txt",
"group": {
"kind": "build",
"isDefault": true
.yamllint
@@ -1,4 +1,5 @@
ignore: |
  azure-*.yml
  tests/fixtures/core/config/yaml_errors/
rules:
  braces:
CODEOWNERS (379 lines changed)
@@ -5,30 +5,13 @@
# https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners

# Home Assistant Core
.core_files.yaml @home-assistant/core
.git-blame-ignore-revs @home-assistant/core
.gitattributes @home-assistant/core
.gitignore @home-assistant/core
.hadolint.yaml @home-assistant/core
.pre-commit-config.yaml @home-assistant/core
.prettierignore @home-assistant/core
.yamllint @home-assistant/core
setup.cfg @home-assistant/core
pyproject.toml @home-assistant/core
requirements_test.txt @home-assistant/core
/.devcontainer/ @home-assistant/core
/.github/ @home-assistant/core
/.vscode/ @home-assistant/core
/homeassistant/*.py @home-assistant/core
/homeassistant/auth/ @home-assistant/core
/homeassistant/backports/ @home-assistant/core
/homeassistant/helpers/ @home-assistant/core
/homeassistant/scripts/ @home-assistant/core
/homeassistant/util/ @home-assistant/core
/pylint/ @home-assistant/core
/script/ @home-assistant/core

# Home Assistant Supervisor
.dockerignore @home-assistant/supervisor
build.json @home-assistant/supervisor
/machine/ @home-assistant/supervisor
/rootfs/ @home-assistant/supervisor
@@ -40,8 +23,6 @@ build.json @home-assistant/supervisor
# Integrations
/homeassistant/components/abode/ @shred86
/tests/components/abode/ @shred86
/homeassistant/components/acaia/ @zweckj
/tests/components/acaia/ @zweckj
/homeassistant/components/accuweather/ @bieniu
/tests/components/accuweather/ @bieniu
/homeassistant/components/acmeda/ @atmurray
@@ -50,7 +31,6 @@ build.json @home-assistant/supervisor
/tests/components/adax/ @danielhiversen
/homeassistant/components/adguard/ @frenck
/tests/components/adguard/ @frenck
/homeassistant/components/ads/ @mrpasztoradam
/homeassistant/components/advantage_air/ @Bre77
/tests/components/advantage_air/ @Bre77
/homeassistant/components/aemet/ @Noltari
@@ -59,8 +39,6 @@ build.json @home-assistant/supervisor
/tests/components/agent_dvr/ @ispysoftware
/homeassistant/components/air_quality/ @home-assistant/core
/tests/components/air_quality/ @home-assistant/core
/homeassistant/components/airgradient/ @airgradienthq @joostlek
/tests/components/airgradient/ @airgradienthq @joostlek
/homeassistant/components/airly/ @bieniu
/tests/components/airly/ @bieniu
/homeassistant/components/airnow/ @asymworks
@@ -83,17 +61,18 @@ build.json @home-assistant/supervisor
/tests/components/airzone/ @Noltari
/homeassistant/components/airzone_cloud/ @Noltari
/tests/components/airzone_cloud/ @Noltari
/homeassistant/components/aladdin_connect/ @mkmer
/tests/components/aladdin_connect/ @mkmer
/homeassistant/components/alarm_control_panel/ @home-assistant/core
/tests/components/alarm_control_panel/ @home-assistant/core
/homeassistant/components/alert/ @home-assistant/core @frenck
/tests/components/alert/ @home-assistant/core @frenck
/homeassistant/components/alexa/ @home-assistant/cloud @ochlocracy @jbouwh
/tests/components/alexa/ @home-assistant/cloud @ochlocracy @jbouwh
/homeassistant/components/amazon_polly/ @jschlyter
/homeassistant/components/amberelectric/ @madpilot
/tests/components/amberelectric/ @madpilot
/homeassistant/components/ambient_network/ @thomaskistler
/tests/components/ambient_network/ @thomaskistler
/homeassistant/components/ambiclimate/ @danielhiversen
/tests/components/ambiclimate/ @danielhiversen
/homeassistant/components/ambient_station/ @bachya
/tests/components/ambient_station/ @bachya
/homeassistant/components/amcrest/ @flacjacket
@@ -111,8 +90,6 @@ build.json @home-assistant/supervisor
/tests/components/anova/ @Lash-L
/homeassistant/components/anthemav/ @hyralex
/tests/components/anthemav/ @hyralex
/homeassistant/components/anthropic/ @Shulyaka
/tests/components/anthropic/ @Shulyaka
/homeassistant/components/aosmith/ @bdr99
/tests/components/aosmith/ @bdr99
/homeassistant/components/apache_kafka/ @bachya
@@ -127,27 +104,17 @@ build.json @home-assistant/supervisor
/tests/components/application_credentials/ @home-assistant/core
/homeassistant/components/apprise/ @caronc
/tests/components/apprise/ @caronc
/homeassistant/components/aprilaire/ @chamberlain2007
/tests/components/aprilaire/ @chamberlain2007
/homeassistant/components/aprs/ @PhilRW
/tests/components/aprs/ @PhilRW
/homeassistant/components/apsystems/ @mawoka-myblock @SonnenladenGmbH
/tests/components/apsystems/ @mawoka-myblock @SonnenladenGmbH
/homeassistant/components/aquacell/ @Jordi1990
/tests/components/aquacell/ @Jordi1990
/homeassistant/components/aranet/ @aschmitz @thecode @anrijs
/tests/components/aranet/ @aschmitz @thecode @anrijs
/homeassistant/components/aranet/ @aschmitz @thecode
/tests/components/aranet/ @aschmitz @thecode
/homeassistant/components/arcam_fmj/ @elupus
/tests/components/arcam_fmj/ @elupus
/homeassistant/components/arris_tg2492lg/ @vanbalken
/homeassistant/components/arve/ @ikalnyi
/tests/components/arve/ @ikalnyi
/homeassistant/components/aseko_pool_live/ @milanmeu
/tests/components/aseko_pool_live/ @milanmeu
/homeassistant/components/assist_pipeline/ @balloob @synesthesiam
/tests/components/assist_pipeline/ @balloob @synesthesiam
/homeassistant/components/assist_satellite/ @home-assistant/core @synesthesiam
/tests/components/assist_satellite/ @home-assistant/core @synesthesiam
/homeassistant/components/asuswrt/ @kennedyshead @ollo69
/tests/components/asuswrt/ @kennedyshead @ollo69
/homeassistant/components/atag/ @MatsNL
@@ -162,8 +129,6 @@ build.json @home-assistant/supervisor
/tests/components/aurora_abb_powerone/ @davet2001
/homeassistant/components/aussie_broadband/ @nickw444 @Bre77
/tests/components/aussie_broadband/ @nickw444 @Bre77
/homeassistant/components/autarco/ @klaasnicolaas
/tests/components/autarco/ @klaasnicolaas
/homeassistant/components/auth/ @home-assistant/core
/tests/components/auth/ @home-assistant/core
/homeassistant/components/automation/ @home-assistant/core
@@ -173,8 +138,6 @@ build.json @home-assistant/supervisor
/tests/components/awair/ @ahayworth @danielsjf
/homeassistant/components/axis/ @Kane610
/tests/components/axis/ @Kane610
/homeassistant/components/azure_data_explorer/ @kaareseras
/tests/components/azure_data_explorer/ @kaareseras
/homeassistant/components/azure_devops/ @timmo001
/tests/components/azure_devops/ @timmo001
/homeassistant/components/azure_event_hub/ @eavanvalkenburg
@@ -194,8 +157,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/binary_sensor/ @home-assistant/core
/tests/components/binary_sensor/ @home-assistant/core
/homeassistant/components/bizkaibus/ @UgaitzEtxebarria
/homeassistant/components/blebox/ @bbx-a @swistakm
/tests/components/blebox/ @bbx-a @swistakm
/homeassistant/components/blebox/ @bbx-a @riokuu
/tests/components/blebox/ @bbx-a @riokuu
/homeassistant/components/blink/ @fronzbot @mkmer
/tests/components/blink/ @fronzbot @mkmer
/homeassistant/components/blue_current/ @Floris272 @gleeuwen
@@ -204,8 +167,7 @@ build.json @home-assistant/supervisor
/tests/components/bluemaestro/ @bdraco
/homeassistant/components/blueprint/ @home-assistant/core
/tests/components/blueprint/ @home-assistant/core
/homeassistant/components/bluesound/ @thrawnarn @LouisChrist
/tests/components/bluesound/ @thrawnarn @LouisChrist
/homeassistant/components/bluesound/ @thrawnarn
/homeassistant/components/bluetooth/ @bdraco
/tests/components/bluetooth/ @bdraco
/homeassistant/components/bluetooth_adapters/ @bdraco
@@ -228,21 +190,17 @@ build.json @home-assistant/supervisor
/tests/components/brottsplatskartan/ @gjohansson-ST
/homeassistant/components/brunt/ @eavanvalkenburg
/tests/components/brunt/ @eavanvalkenburg
/homeassistant/components/bryant_evolution/ @danielsmyers
/tests/components/bryant_evolution/ @danielsmyers
/homeassistant/components/bsblan/ @liudger
/tests/components/bsblan/ @liudger
/homeassistant/components/bt_smarthub/ @typhoon2099
/homeassistant/components/bthome/ @Ernst79 @thecode
/tests/components/bthome/ @Ernst79 @thecode
/homeassistant/components/bthome/ @Ernst79
/tests/components/bthome/ @Ernst79
/homeassistant/components/buienradar/ @mjj4791 @ties @Robbie1221
/tests/components/buienradar/ @mjj4791 @ties @Robbie1221
/homeassistant/components/button/ @home-assistant/core
/tests/components/button/ @home-assistant/core
/homeassistant/components/calendar/ @home-assistant/core
/tests/components/calendar/ @home-assistant/core
/homeassistant/components/cambridge_audio/ @noahhusby
/tests/components/cambridge_audio/ @noahhusby
/homeassistant/components/camera/ @home-assistant/core
/tests/components/camera/ @home-assistant/core
/homeassistant/components/cast/ @emontnemery
@@ -251,8 +209,7 @@ build.json @home-assistant/supervisor
/tests/components/ccm15/ @ocalvo
/homeassistant/components/cert_expiry/ @jjlawren
/tests/components/cert_expiry/ @jjlawren
/homeassistant/components/chacon_dio/ @cnico
/tests/components/chacon_dio/ @cnico
/homeassistant/components/circuit/ @braam
/homeassistant/components/cisco_ios/ @fbradyirl
/homeassistant/components/cisco_mobility_express/ @fbradyirl
/homeassistant/components/cisco_webex_teams/ @fbradyirl
@@ -301,8 +258,6 @@ build.json @home-assistant/supervisor
/tests/components/date/ @home-assistant/core
/homeassistant/components/datetime/ @home-assistant/core
/tests/components/datetime/ @home-assistant/core
/homeassistant/components/deako/ @sebirdman @balake @deakolights
/tests/components/deako/ @sebirdman @balake @deakolights
/homeassistant/components/debugpy/ @frenck
/tests/components/debugpy/ @frenck
/homeassistant/components/deconz/ @Kane610
@@ -342,8 +297,8 @@ build.json @home-assistant/supervisor
/tests/components/discovergy/ @jpbede
/homeassistant/components/dlink/ @tkdrob
/tests/components/dlink/ @tkdrob
/homeassistant/components/dlna_dmr/ @chishm
/tests/components/dlna_dmr/ @chishm
/homeassistant/components/dlna_dmr/ @StevenLooman @chishm
/tests/components/dlna_dmr/ @StevenLooman @chishm
/homeassistant/components/dlna_dms/ @chishm
/tests/components/dlna_dms/ @chishm
/homeassistant/components/dnsip/ @gjohansson-ST
@@ -352,18 +307,14 @@ build.json @home-assistant/supervisor
/tests/components/doorbird/ @oblogic7 @bdraco @flacjacket
/homeassistant/components/dormakaba_dkey/ @emontnemery
/tests/components/dormakaba_dkey/ @emontnemery
/homeassistant/components/downloader/ @erwindouna
/tests/components/downloader/ @erwindouna
/homeassistant/components/dremel_3d_printer/ @tkdrob
/tests/components/dremel_3d_printer/ @tkdrob
/homeassistant/components/drop_connect/ @ChandlerSystems @pfrazer
/tests/components/drop_connect/ @ChandlerSystems @pfrazer
/homeassistant/components/dsmr/ @Robbie1221
/tests/components/dsmr/ @Robbie1221
/homeassistant/components/dsmr_reader/ @sorted-bits @glodenox @erwindouna
/tests/components/dsmr_reader/ @sorted-bits @glodenox @erwindouna
/homeassistant/components/duke_energy/ @hunterjm
/tests/components/duke_energy/ @hunterjm
/homeassistant/components/dsmr/ @Robbie1221 @frenck
/tests/components/dsmr/ @Robbie1221 @frenck
/homeassistant/components/dsmr_reader/ @depl0y @glodenox
/tests/components/dsmr_reader/ @depl0y @glodenox
/homeassistant/components/duotecno/ @cereal2nd
/tests/components/duotecno/ @cereal2nd
/homeassistant/components/dwd_weather_warnings/ @runningman84 @stephan192 @andarotajo
@ -378,8 +329,8 @@ build.json @home-assistant/supervisor
|
|||
/tests/components/ecoforest/ @pjanuario
|
||||
/homeassistant/components/econet/ @w1ll1am23
|
||||
/tests/components/econet/ @w1ll1am23
|
||||
/homeassistant/components/ecovacs/ @mib1185 @edenhaus @Augar
|
||||
/tests/components/ecovacs/ @mib1185 @edenhaus @Augar
|
||||
/homeassistant/components/ecovacs/ @OverloadUT @mib1185 @edenhaus
|
||||
/tests/components/ecovacs/ @OverloadUT @mib1185 @edenhaus
|
||||
/homeassistant/components/ecowitt/ @pvizeli
|
||||
/tests/components/ecowitt/ @pvizeli
|
||||
/homeassistant/components/efergy/ @tkdrob
|
||||
|
@ -389,8 +340,6 @@ build.json @home-assistant/supervisor
|
|||
/tests/components/electrasmart/ @jafar-atili
|
||||
/homeassistant/components/electric_kiwi/ @mikey0000
|
||||
/tests/components/electric_kiwi/ @mikey0000
|
||||
/homeassistant/components/elevenlabs/ @sorgfresser
|
||||
/tests/components/elevenlabs/ @sorgfresser
|
||||
/homeassistant/components/elgato/ @frenck
|
||||
/tests/components/elgato/ @frenck
|
||||
/homeassistant/components/elkm1/ @gwww @bdraco
|
||||
|
@ -401,38 +350,31 @@ build.json @home-assistant/supervisor
|
|||
/homeassistant/components/elvia/ @ludeeus
|
||||
/tests/components/elvia/ @ludeeus
|
||||
/homeassistant/components/emby/ @mezz64
|
||||
/homeassistant/components/emoncms/ @borpin @alexandrecuer
|
||||
/tests/components/emoncms/ @borpin @alexandrecuer
|
||||
/homeassistant/components/emoncms/ @borpin
|
||||
/homeassistant/components/emonitor/ @bdraco
|
||||
/tests/components/emonitor/ @bdraco
|
||||
/homeassistant/components/emulated_hue/ @bdraco @Tho85
|
||||
/tests/components/emulated_hue/ @bdraco @Tho85
|
||||
/homeassistant/components/emulated_kasa/ @kbickar
|
||||
/tests/components/emulated_kasa/ @kbickar
|
||||
/homeassistant/components/energenie_power_sockets/ @gnumpi
|
||||
/tests/components/energenie_power_sockets/ @gnumpi
|
||||
/homeassistant/components/energy/ @home-assistant/core
|
||||
/tests/components/energy/ @home-assistant/core
|
||||
/homeassistant/components/energyzero/ @klaasnicolaas
|
||||
/tests/components/energyzero/ @klaasnicolaas
|
||||
/homeassistant/components/enigma2/ @autinerd
|
||||
/tests/components/enigma2/ @autinerd
|
||||
/homeassistant/components/enocean/ @bdurrer
|
||||
/tests/components/enocean/ @bdurrer
|
||||
/homeassistant/components/enphase_envoy/ @bdraco @cgarwood @joostlek @catsmanac
|
||||
/tests/components/enphase_envoy/ @bdraco @cgarwood @joostlek @catsmanac
|
||||
/homeassistant/components/enphase_envoy/ @bdraco @cgarwood @dgomes @joostlek @catsmanac
|
||||
/tests/components/enphase_envoy/ @bdraco @cgarwood @dgomes @joostlek @catsmanac
|
||||
/homeassistant/components/entur_public_transport/ @hfurubotten
|
||||
/homeassistant/components/environment_canada/ @gwww @michaeldavie
|
||||
/tests/components/environment_canada/ @gwww @michaeldavie
|
||||
/homeassistant/components/ephember/ @ttroy50
|
||||
/homeassistant/components/epic_games_store/ @hacf-fr @Quentame
|
||||
/tests/components/epic_games_store/ @hacf-fr @Quentame
|
||||
/homeassistant/components/epion/ @lhgravendeel
|
||||
/tests/components/epion/ @lhgravendeel
|
||||
/homeassistant/components/epson/ @pszafer
|
||||
/tests/components/epson/ @pszafer
|
||||
/homeassistant/components/eq3btsmart/ @eulemitkeule @dbuezas
|
||||
/tests/components/eq3btsmart/ @eulemitkeule @dbuezas
|
||||
/homeassistant/components/epsonworkforce/ @ThaStealth
|
||||
/homeassistant/components/escea/ @lazdavila
|
||||
/tests/components/escea/ @lazdavila
|
||||
/homeassistant/components/esphome/ @OttoWinter @jesserockz @kbx81 @bdraco
|
||||
|
@ -444,7 +386,6 @@ build.json @home-assistant/supervisor
|
|||
/homeassistant/components/evil_genius_labs/ @balloob
|
||||
/tests/components/evil_genius_labs/ @balloob
|
||||
/homeassistant/components/evohome/ @zxdavb
|
||||
/tests/components/evohome/ @zxdavb
|
||||
/homeassistant/components/ezviz/ @RenierM26 @baqs
|
||||
/tests/components/ezviz/ @RenierM26 @baqs
|
||||
/homeassistant/components/faa_delays/ @ntilley905
|
||||
|
@ -453,8 +394,6 @@ build.json @home-assistant/supervisor
|
|||
/tests/components/fan/ @home-assistant/core
|
||||
/homeassistant/components/fastdotcom/ @rohankapoorcom @erwindouna
|
||||
/tests/components/fastdotcom/ @rohankapoorcom @erwindouna
|
||||
/homeassistant/components/feedreader/ @mib1185
|
||||
/tests/components/feedreader/ @mib1185
|
||||
/homeassistant/components/fibaro/ @rappenze
|
||||
/tests/components/fibaro/ @rappenze
|
||||
/homeassistant/components/file/ @fabaff
|
||||
|
@ -492,14 +431,14 @@ build.json @home-assistant/supervisor
|
|||
/homeassistant/components/forked_daapd/ @uvjustin
|
||||
/tests/components/forked_daapd/ @uvjustin
|
||||
/homeassistant/components/fortios/ @kimfrellsen
|
||||
/homeassistant/components/foscam/ @krmarien
|
||||
/tests/components/foscam/ @krmarien
|
||||
/homeassistant/components/foscam/ @skgsergio @krmarien
|
||||
/tests/components/foscam/ @skgsergio @krmarien
|
||||
/homeassistant/components/freebox/ @hacf-fr @Quentame
|
||||
/tests/components/freebox/ @hacf-fr @Quentame
|
||||
/homeassistant/components/freedompro/ @stefano055415
|
||||
/tests/components/freedompro/ @stefano055415
|
||||
/homeassistant/components/fritz/ @AaronDavidSchneider @chemelli74 @mib1185
|
||||
/tests/components/fritz/ @AaronDavidSchneider @chemelli74 @mib1185
|
||||
/homeassistant/components/fritz/ @mammuth @AaronDavidSchneider @chemelli74 @mib1185
|
||||
/tests/components/fritz/ @mammuth @AaronDavidSchneider @chemelli74 @mib1185
|
||||
/homeassistant/components/fritzbox/ @mib1185 @flabbamann
|
||||
/tests/components/fritzbox/ @mib1185 @flabbamann
|
||||
/homeassistant/components/fritzbox_callmonitor/ @cdce8p
|
||||
|
@ -510,12 +449,8 @@ build.json @home-assistant/supervisor
|
|||
/tests/components/frontend/ @home-assistant/frontend
|
||||
/homeassistant/components/frontier_silicon/ @wlcrs
|
||||
/tests/components/frontier_silicon/ @wlcrs
|
||||
/homeassistant/components/fujitsu_fglair/ @crevetor
|
||||
/tests/components/fujitsu_fglair/ @crevetor
|
||||
/homeassistant/components/fully_kiosk/ @cgarwood
|
||||
/tests/components/fully_kiosk/ @cgarwood
|
||||
/homeassistant/components/fyta/ @dontinelli
|
||||
/tests/components/fyta/ @dontinelli
|
||||
/homeassistant/components/garages_amsterdam/ @klaasnicolaas
|
||||
/tests/components/garages_amsterdam/ @klaasnicolaas
|
||||
/homeassistant/components/gardena_bluetooth/ @elupus
|
||||
|
@ -527,7 +462,6 @@ build.json @home-assistant/supervisor
|
|||
/homeassistant/components/generic_hygrostat/ @Shulyaka
|
||||
/tests/components/generic_hygrostat/ @Shulyaka
|
||||
/homeassistant/components/geniushub/ @manzanotti
|
||||
/tests/components/geniushub/ @manzanotti
|
||||
/homeassistant/components/geo_json_events/ @exxamalte
|
||||
/tests/components/geo_json_events/ @exxamalte
|
||||
/homeassistant/components/geo_location/ @home-assistant/core
|
||||
|
@ -546,8 +480,6 @@ build.json @home-assistant/supervisor
|
|||
/tests/components/github/ @timmo001 @ludeeus
|
||||
/homeassistant/components/glances/ @engrbm87
|
||||
/tests/components/glances/ @engrbm87
|
||||
/homeassistant/components/go2rtc/ @home-assistant/core
|
||||
/tests/components/go2rtc/ @home-assistant/core
|
||||
/homeassistant/components/goalzero/ @tkdrob
|
||||
/tests/components/goalzero/ @tkdrob
|
||||
/homeassistant/components/gogogate2/ @vangorra
|
||||
|
@ -560,14 +492,11 @@ build.json @home-assistant/supervisor
|
|||
/tests/components/google_assistant/ @home-assistant/cloud
|
||||
/homeassistant/components/google_assistant_sdk/ @tronikos
|
||||
/tests/components/google_assistant_sdk/ @tronikos
|
||||
/homeassistant/components/google_cloud/ @lufton @tronikos
|
||||
/tests/components/google_cloud/ @lufton @tronikos
|
||||
/homeassistant/components/google_cloud/ @lufton
|
||||
/homeassistant/components/google_generative_ai_conversation/ @tronikos
|
||||
/tests/components/google_generative_ai_conversation/ @tronikos
|
||||
/homeassistant/components/google_mail/ @tkdrob
|
||||
/tests/components/google_mail/ @tkdrob
|
||||
/homeassistant/components/google_photos/ @allenporter
|
||||
/tests/components/google_photos/ @allenporter
|
||||
/homeassistant/components/google_sheets/ @tkdrob
|
||||
/tests/components/google_sheets/ @tkdrob
|
||||
/homeassistant/components/google_tasks/ @allenporter
|
||||
|
@ -588,14 +517,14 @@ build.json @home-assistant/supervisor
|
|||
/tests/components/group/ @home-assistant/core
|
||||
/homeassistant/components/guardian/ @bachya
|
||||
/tests/components/guardian/ @bachya
|
||||
/homeassistant/components/habitica/ @ASMfreaK @leikoilja @tr4nt0r
|
||||
/tests/components/habitica/ @ASMfreaK @leikoilja @tr4nt0r
|
||||
/homeassistant/components/habitica/ @ASMfreaK @leikoilja
|
||||
/tests/components/habitica/ @ASMfreaK @leikoilja
|
||||
/homeassistant/components/hardkernel/ @home-assistant/core
|
||||
/tests/components/hardkernel/ @home-assistant/core
|
||||
/homeassistant/components/hardware/ @home-assistant/core
|
||||
/tests/components/hardware/ @home-assistant/core
|
||||
/homeassistant/components/harmony/ @ehendrix23 @bdraco @mkeesey @Aohzan
|
||||
/tests/components/harmony/ @ehendrix23 @bdraco @mkeesey @Aohzan
|
||||
/homeassistant/components/harmony/ @ehendrix23 @bramkragten @bdraco @mkeesey @Aohzan
|
||||
/tests/components/harmony/ @ehendrix23 @bramkragten @bdraco @mkeesey @Aohzan
|
||||
/homeassistant/components/hassio/ @home-assistant/supervisor
|
||||
/tests/components/hassio/ @home-assistant/supervisor
|
||||
/homeassistant/components/hdmi_cec/ @inytar
|
||||
|
@ -619,8 +548,8 @@ build.json @home-assistant/supervisor
|
|||
/tests/components/hlk_sw16/ @jameshilliard
|
||||
/homeassistant/components/holiday/ @jrieger @gjohansson-ST
|
||||
/tests/components/holiday/ @jrieger @gjohansson-ST
|
||||
/homeassistant/components/home_connect/ @DavidMStraub @Diegorro98
|
||||
/tests/components/home_connect/ @DavidMStraub @Diegorro98
|
||||
/homeassistant/components/home_connect/ @DavidMStraub
|
||||
/tests/components/home_connect/ @DavidMStraub
|
||||
/homeassistant/components/homeassistant/ @home-assistant/core
|
||||
/tests/components/homeassistant/ @home-assistant/core
|
||||
/homeassistant/components/homeassistant_alerts/ @home-assistant/core
|
||||
|
@ -637,16 +566,12 @@ build.json @home-assistant/supervisor
|
|||
/tests/components/homekit/ @bdraco
|
||||
/homeassistant/components/homekit_controller/ @Jc2k @bdraco
|
||||
/tests/components/homekit_controller/ @Jc2k @bdraco
|
||||
/homeassistant/components/homematic/ @pvizeli
|
||||
/tests/components/homematic/ @pvizeli
|
||||
/homeassistant/components/homematicip_cloud/ @hahn-th
|
||||
/tests/components/homematicip_cloud/ @hahn-th
|
||||
/homeassistant/components/homematic/ @pvizeli @danielperna84
|
||||
/tests/components/homematic/ @pvizeli @danielperna84
|
||||
/homeassistant/components/homewizard/ @DCSBL
|
||||
/tests/components/homewizard/ @DCSBL
|
||||
/homeassistant/components/honeywell/ @rdfurman @mkmer
|
||||
/tests/components/honeywell/ @rdfurman @mkmer
|
||||
/homeassistant/components/html5/ @alexyao2015
|
||||
/tests/components/html5/ @alexyao2015
|
||||
/homeassistant/components/http/ @home-assistant/core
|
||||
/tests/components/http/ @home-assistant/core
|
||||
/homeassistant/components/huawei_lte/ @scop @fphammerle
|
||||
|
@ -659,16 +584,12 @@ build.json @home-assistant/supervisor
|
|||
/tests/components/humidifier/ @home-assistant/core @Shulyaka
|
||||
/homeassistant/components/hunterdouglas_powerview/ @bdraco @kingy444 @trullock
|
||||
/tests/components/hunterdouglas_powerview/ @bdraco @kingy444 @trullock
|
||||
/homeassistant/components/husqvarna_automower/ @Thomas55555
|
||||
/tests/components/husqvarna_automower/ @Thomas55555
|
||||
/homeassistant/components/husqvarna_automower_ble/ @alistair23
|
||||
/tests/components/husqvarna_automower_ble/ @alistair23
|
||||
/homeassistant/components/huum/ @frwickst
|
||||
/tests/components/huum/ @frwickst
|
||||
/homeassistant/components/hvv_departures/ @vigonotion
|
||||
/tests/components/hvv_departures/ @vigonotion
|
||||
/homeassistant/components/hydrawise/ @dknowles2 @thomaskistler @ptcryan
|
||||
/tests/components/hydrawise/ @dknowles2 @thomaskistler @ptcryan
|
||||
/homeassistant/components/hydrawise/ @dknowles2 @ptcryan
|
||||
/tests/components/hydrawise/ @dknowles2 @ptcryan
|
||||
/homeassistant/components/hyperion/ @dermotduffy
|
||||
/tests/components/hyperion/ @dermotduffy
|
||||
/homeassistant/components/ialarm/ @RyuzakiKK
|
||||
|
@ -692,12 +613,9 @@ build.json @home-assistant/supervisor
|
|||
/tests/components/image_upload/ @home-assistant/core
|
||||
/homeassistant/components/imap/ @jbouwh
|
||||
/tests/components/imap/ @jbouwh
|
||||
/homeassistant/components/imgw_pib/ @bieniu
|
||||
/tests/components/imgw_pib/ @bieniu
|
||||
/homeassistant/components/improv_ble/ @emontnemery
|
||||
/tests/components/improv_ble/ @emontnemery
|
||||
/homeassistant/components/incomfort/ @jbouwh
|
||||
/tests/components/incomfort/ @jbouwh
|
||||
/homeassistant/components/incomfort/ @zxdavb
|
||||
/homeassistant/components/influxdb/ @mdegat01
|
||||
/tests/components/influxdb/ @mdegat01
|
||||
/homeassistant/components/inkbird/ @bdraco
|
||||
|
@ -727,8 +645,6 @@ build.json @home-assistant/supervisor
|
|||
/tests/components/ios/ @robbiet480
|
||||
/homeassistant/components/iotawatt/ @gtdiehl @jyavenard
|
||||
/tests/components/iotawatt/ @gtdiehl @jyavenard
|
||||
/homeassistant/components/iotty/ @pburgio @shapournemati-iotty
|
||||
/tests/components/iotty/ @pburgio @shapournemati-iotty
|
||||
/homeassistant/components/iperf3/ @rohankapoorcom
|
||||
/homeassistant/components/ipma/ @dgomes
|
||||
/tests/components/ipma/ @dgomes
|
||||
|
@ -737,20 +653,10 @@ build.json @home-assistant/supervisor
|
|||
/homeassistant/components/iqvia/ @bachya
|
||||
/tests/components/iqvia/ @bachya
|
||||
/homeassistant/components/irish_rail_transport/ @ttroy50
|
||||
/homeassistant/components/iron_os/ @tr4nt0r
|
||||
/tests/components/iron_os/ @tr4nt0r
|
||||
/homeassistant/components/isal/ @bdraco
|
||||
/tests/components/isal/ @bdraco
|
||||
/homeassistant/components/iskra/ @iskramis
|
||||
/tests/components/iskra/ @iskramis
|
||||
/homeassistant/components/islamic_prayer_times/ @engrbm87 @cpfair
|
||||
/tests/components/islamic_prayer_times/ @engrbm87 @cpfair
|
||||
/homeassistant/components/israel_rail/ @shaiu
|
||||
/tests/components/israel_rail/ @shaiu
|
||||
/homeassistant/components/islamic_prayer_times/ @engrbm87
|
||||
/tests/components/islamic_prayer_times/ @engrbm87
|
||||
/homeassistant/components/iss/ @DurgNomis-drol
|
||||
/tests/components/iss/ @DurgNomis-drol
|
||||
/homeassistant/components/ista_ecotrend/ @tr4nt0r
|
||||
/tests/components/ista_ecotrend/ @tr4nt0r
|
||||
/homeassistant/components/isy994/ @bdraco @shbatm
|
||||
/tests/components/isy994/ @bdraco @shbatm
|
||||
/homeassistant/components/izone/ @Swamp-Ig
|
||||
|
@ -781,8 +687,6 @@ build.json @home-assistant/supervisor
|
|||
/tests/components/kitchen_sink/ @home-assistant/core
|
||||
/homeassistant/components/kmtronic/ @dgomes
|
||||
/tests/components/kmtronic/ @dgomes
|
||||
/homeassistant/components/knocki/ @joostlek @jgatto1 @JakeBosh
|
||||
/tests/components/knocki/ @joostlek @jgatto1 @JakeBosh
|
||||
/homeassistant/components/knx/ @Julius2342 @farmio @marvin-w
|
||||
/tests/components/knx/ @Julius2342 @farmio @marvin-w
|
||||
/homeassistant/components/kodi/ @OnFreund
|
||||
|
@ -819,22 +723,13 @@ build.json @home-assistant/supervisor
|
|||
/tests/components/leaone/ @bdraco
|
||||
/homeassistant/components/led_ble/ @bdraco
|
||||
/tests/components/led_ble/ @bdraco
|
||||
/homeassistant/components/lektrico/ @lektrico
|
||||
/tests/components/lektrico/ @lektrico
|
||||
/homeassistant/components/lg_netcast/ @Drafteed @splinter98
|
||||
/tests/components/lg_netcast/ @Drafteed @splinter98
|
||||
/homeassistant/components/lg_thinq/ @LG-ThinQ-Integration
|
||||
/tests/components/lg_thinq/ @LG-ThinQ-Integration
|
||||
/homeassistant/components/lg_netcast/ @Drafteed
|
||||
/homeassistant/components/lidarr/ @tkdrob
|
||||
/tests/components/lidarr/ @tkdrob
|
||||
/homeassistant/components/lifx/ @Djelibeybi
|
||||
/tests/components/lifx/ @Djelibeybi
|
||||
/homeassistant/components/light/ @home-assistant/core
|
||||
/tests/components/light/ @home-assistant/core
|
||||
/homeassistant/components/linear_garage_door/ @IceBotYT
|
||||
/tests/components/linear_garage_door/ @IceBotYT
|
||||
/homeassistant/components/linkplay/ @Velleman
|
||||
/tests/components/linkplay/ @Velleman
|
||||
/homeassistant/components/linux_battery/ @fabaff
|
||||
/homeassistant/components/litejet/ @joncar
|
||||
/tests/components/litejet/ @joncar
|
||||
|
@ -854,6 +749,8 @@ build.json @home-assistant/supervisor
|
|||
/tests/components/logbook/ @home-assistant/core
|
||||
/homeassistant/components/logger/ @home-assistant/core
|
||||
/tests/components/logger/ @home-assistant/core
|
||||
/homeassistant/components/logi_circle/ @evanjd
|
||||
/tests/components/logi_circle/ @evanjd
|
||||
/homeassistant/components/london_underground/ @jpbede
|
||||
/tests/components/london_underground/ @jpbede
|
||||
/homeassistant/components/lookin/ @ANMalko @bdraco
|
||||
|
@ -869,20 +766,15 @@ build.json @home-assistant/supervisor
|
|||
/tests/components/lupusec/ @majuss @suaveolent
|
||||
/homeassistant/components/lutron/ @cdheiser @wilburCForce
|
||||
/tests/components/lutron/ @cdheiser @wilburCForce
|
||||
/homeassistant/components/lutron_caseta/ @swails @danaues @eclair4151
|
||||
/tests/components/lutron_caseta/ @swails @danaues @eclair4151
|
||||
/homeassistant/components/lutron_caseta/ @swails @bdraco @danaues
|
||||
/tests/components/lutron_caseta/ @swails @bdraco @danaues
|
||||
/homeassistant/components/lyric/ @timmo001
|
||||
/tests/components/lyric/ @timmo001
|
||||
/homeassistant/components/madvr/ @iloveicedgreentea
|
||||
/tests/components/madvr/ @iloveicedgreentea
|
||||
/homeassistant/components/mastodon/ @fabaff @andrew-codechimp
|
||||
/tests/components/mastodon/ @fabaff @andrew-codechimp
|
||||
/homeassistant/components/mastodon/ @fabaff
|
||||
/homeassistant/components/matrix/ @PaarthShah
|
||||
/tests/components/matrix/ @PaarthShah
|
||||
/homeassistant/components/matter/ @home-assistant/matter
|
||||
/tests/components/matter/ @home-assistant/matter
|
||||
/homeassistant/components/mealie/ @joostlek @andrew-codechimp
|
||||
/tests/components/mealie/ @joostlek @andrew-codechimp
|
||||
/homeassistant/components/meater/ @Sotolotl @emontnemery
|
||||
/tests/components/meater/ @Sotolotl @emontnemery
|
||||
/homeassistant/components/medcom_ble/ @elafargue
|
||||
|
@ -894,8 +786,8 @@ build.json @home-assistant/supervisor
|
|||
/homeassistant/components/media_source/ @hunterjm
|
||||
/tests/components/media_source/ @hunterjm
|
||||
/homeassistant/components/mediaroom/ @dgomes
|
||||
/homeassistant/components/melcloud/ @erwindouna
|
||||
/tests/components/melcloud/ @erwindouna
|
||||
/homeassistant/components/melcloud/ @vilppuvuorinen
|
||||
/tests/components/melcloud/ @vilppuvuorinen
|
||||
/homeassistant/components/melissa/ @kennedyshead
|
||||
/tests/components/melissa/ @kennedyshead
|
||||
/homeassistant/components/melnor/ @vanstinator
|
||||
|
@ -911,8 +803,6 @@ build.json @home-assistant/supervisor
|
|||
/tests/components/meteoclimatic/ @adrianmo
|
||||
/homeassistant/components/metoffice/ @MrHarcombe @avee87
|
||||
/tests/components/metoffice/ @MrHarcombe @avee87
|
||||
/homeassistant/components/microbees/ @microBeesTech
|
||||
/tests/components/microbees/ @microBeesTech
|
||||
/homeassistant/components/mikrotik/ @engrbm87
|
||||
/tests/components/mikrotik/ @engrbm87
|
||||
/homeassistant/components/mill/ @danielhiversen
|
||||
|
@ -927,37 +817,31 @@ build.json @home-assistant/supervisor
|
|||
/tests/components/moat/ @bdraco
|
||||
/homeassistant/components/mobile_app/ @home-assistant/core
|
||||
/tests/components/mobile_app/ @home-assistant/core
|
||||
/homeassistant/components/modbus/ @janiversen
|
||||
/tests/components/modbus/ @janiversen
|
||||
/homeassistant/components/modem_callerid/ @tkdrob
|
||||
/tests/components/modem_callerid/ @tkdrob
|
||||
/homeassistant/components/modern_forms/ @wonderslug
|
||||
/tests/components/modern_forms/ @wonderslug
|
||||
/homeassistant/components/moehlenhoff_alpha2/ @j-a-n
|
||||
/tests/components/moehlenhoff_alpha2/ @j-a-n
|
||||
/homeassistant/components/monarch_money/ @jeeftor
|
||||
/tests/components/monarch_money/ @jeeftor
|
||||
/homeassistant/components/monoprice/ @etsinko @OnFreund
|
||||
/tests/components/monoprice/ @etsinko @OnFreund
|
||||
/homeassistant/components/monzo/ @jakemartin-icl
|
||||
/tests/components/monzo/ @jakemartin-icl
|
||||
/homeassistant/components/moon/ @fabaff @frenck
|
||||
/tests/components/moon/ @fabaff @frenck
|
||||
/homeassistant/components/mopeka/ @bdraco
|
||||
/tests/components/mopeka/ @bdraco
|
||||
/homeassistant/components/motion_blinds/ @starkillerOG
|
||||
/tests/components/motion_blinds/ @starkillerOG
|
||||
/homeassistant/components/motionblinds_ble/ @LennP @jerrybboy
|
||||
/tests/components/motionblinds_ble/ @LennP @jerrybboy
|
||||
/homeassistant/components/motioneye/ @dermotduffy
|
||||
/tests/components/motioneye/ @dermotduffy
|
||||
/homeassistant/components/motionmount/ @RJPoelstra
|
||||
/tests/components/motionmount/ @RJPoelstra
|
||||
/homeassistant/components/mqtt/ @emontnemery @jbouwh @bdraco
|
||||
/tests/components/mqtt/ @emontnemery @jbouwh @bdraco
|
||||
/homeassistant/components/mqtt/ @emontnemery @jbouwh
|
||||
/tests/components/mqtt/ @emontnemery @jbouwh
|
||||
/homeassistant/components/msteams/ @peroyvind
|
||||
/homeassistant/components/mullvad/ @meichthys
|
||||
/tests/components/mullvad/ @meichthys
|
||||
/homeassistant/components/music_assistant/ @music-assistant
|
||||
/tests/components/music_assistant/ @music-assistant
|
||||
/homeassistant/components/mutesync/ @currentoor
|
||||
/tests/components/mutesync/ @currentoor
|
||||
/homeassistant/components/my/ @home-assistant/core
|
||||
|
@ -966,16 +850,14 @@ build.json @home-assistant/supervisor
|
|||
/tests/components/mysensors/ @MartinHjelmare @functionpointer
|
||||
/homeassistant/components/mystrom/ @fabaff
|
||||
/tests/components/mystrom/ @fabaff
|
||||
/homeassistant/components/myuplink/ @pajzo @astrandb
|
||||
/tests/components/myuplink/ @pajzo @astrandb
|
||||
/homeassistant/components/myuplink/ @pajzo
|
||||
/tests/components/myuplink/ @pajzo
|
||||
/homeassistant/components/nam/ @bieniu
|
||||
/tests/components/nam/ @bieniu
|
||||
/homeassistant/components/nanoleaf/ @milanmeu @joostlek
|
||||
/tests/components/nanoleaf/ @milanmeu @joostlek
|
||||
/homeassistant/components/nasweb/ @nasWebio
|
||||
/tests/components/nasweb/ @nasWebio
|
||||
/homeassistant/components/neato/ @Santobert
|
||||
/tests/components/neato/ @Santobert
|
||||
/homeassistant/components/nanoleaf/ @milanmeu
|
||||
/tests/components/nanoleaf/ @milanmeu
|
||||
/homeassistant/components/neato/ @dshokouhi @Santobert
|
||||
/tests/components/neato/ @dshokouhi @Santobert
|
||||
/homeassistant/components/nederlandse_spoorwegen/ @YarmoM
|
||||
/homeassistant/components/ness_alarm/ @nickw444
|
||||
/tests/components/ness_alarm/ @nickw444
|
||||
|
@ -1002,8 +884,6 @@ build.json @home-assistant/supervisor
|
|||
/tests/components/nfandroidtv/ @tkdrob
|
||||
/homeassistant/components/nibe_heatpump/ @elupus
|
||||
/tests/components/nibe_heatpump/ @elupus
|
||||
/homeassistant/components/nice_go/ @IceBotYT
|
||||
/tests/components/nice_go/ @IceBotYT
|
||||
/homeassistant/components/nightscout/ @marciogranzotto
|
||||
/tests/components/nightscout/ @marciogranzotto
|
||||
/homeassistant/components/nilu/ @hfurubotten
|
||||
|
@ -1014,8 +894,6 @@ build.json @home-assistant/supervisor
|
|||
/homeassistant/components/noaa_tides/ @jdelaney72
|
||||
/homeassistant/components/nobo_hub/ @echoromeo @oyvindwe
|
||||
/tests/components/nobo_hub/ @echoromeo @oyvindwe
|
||||
/homeassistant/components/nordpool/ @gjohansson-ST
|
||||
/tests/components/nordpool/ @gjohansson-ST
|
||||
/homeassistant/components/notify/ @home-assistant/core
|
||||
/tests/components/notify/ @home-assistant/core
|
||||
/homeassistant/components/notify_events/ @matrozov @papajojo
|
||||
|
@ -1038,8 +916,6 @@ build.json @home-assistant/supervisor
|
|||
/tests/components/nut/ @bdraco @ollo69 @pestevez
|
||||
/homeassistant/components/nws/ @MatthewFlamm @kamiyo
|
||||
/tests/components/nws/ @MatthewFlamm @kamiyo
|
||||
/homeassistant/components/nyt_games/ @joostlek
|
||||
/tests/components/nyt_games/ @joostlek
|
||||
/homeassistant/components/nzbget/ @chriscla
|
||||
/tests/components/nzbget/ @chriscla
|
||||
/homeassistant/components/obihai/ @dshokouhi @ejpenney
|
||||
|
@ -1047,9 +923,9 @@ build.json @home-assistant/supervisor
|
|||
/homeassistant/components/octoprint/ @rfleming71
|
||||
/tests/components/octoprint/ @rfleming71
|
||||
/homeassistant/components/ohmconnect/ @robbiet480
|
||||
/homeassistant/components/ollama/ @synesthesiam
|
||||
/tests/components/ollama/ @synesthesiam
|
||||
/homeassistant/components/ombi/ @larssont
|
||||
/homeassistant/components/omnilogic/ @oliver84 @djtimca @gentoosu
|
||||
/tests/components/omnilogic/ @oliver84 @djtimca @gentoosu
|
||||
/homeassistant/components/onboarding/ @home-assistant/core
|
||||
/tests/components/onboarding/ @home-assistant/core
|
||||
/homeassistant/components/oncue/ @bdraco @peterager
|
||||
|
@ -1058,8 +934,6 @@ build.json @home-assistant/supervisor
|
|||
/tests/components/ondilo_ico/ @JeromeHXP
|
||||
/homeassistant/components/onewire/ @garbled1 @epenet
|
||||
/tests/components/onewire/ @garbled1 @epenet
|
||||
/homeassistant/components/onkyo/ @arturpragacz
|
||||
/tests/components/onkyo/ @arturpragacz
|
||||
/homeassistant/components/onvif/ @hunterjm
|
||||
/tests/components/onvif/ @hunterjm
|
||||
/homeassistant/components/open_meteo/ @frenck
|
||||
|
@ -1095,16 +969,16 @@ build.json @home-assistant/supervisor
|
|||
/tests/components/otbr/ @home-assistant/core
|
||||
/homeassistant/components/ourgroceries/ @OnFreund
|
||||
/tests/components/ourgroceries/ @OnFreund
|
||||
/homeassistant/components/overkiz/ @imicknl @vlebourl @tetienne @nyroDev @tronix117 @alexfp14
|
||||
/tests/components/overkiz/ @imicknl @vlebourl @tetienne @nyroDev @tronix117 @alexfp14
|
||||
/homeassistant/components/overkiz/ @imicknl @vlebourl @tetienne @nyroDev
|
||||
/tests/components/overkiz/ @imicknl @vlebourl @tetienne @nyroDev
|
||||
/homeassistant/components/ovo_energy/ @timmo001
|
||||
/tests/components/ovo_energy/ @timmo001
|
||||
/homeassistant/components/p1_monitor/ @klaasnicolaas
|
||||
/tests/components/p1_monitor/ @klaasnicolaas
|
||||
/homeassistant/components/palazzetti/ @dotvav
|
||||
/tests/components/palazzetti/ @dotvav
|
||||
/homeassistant/components/panel_custom/ @home-assistant/frontend
|
||||
/tests/components/panel_custom/ @home-assistant/frontend
|
||||
/homeassistant/components/panel_iframe/ @home-assistant/frontend
|
||||
/tests/components/panel_iframe/ @home-assistant/frontend
|
||||
/homeassistant/components/peco/ @IceBotYT
|
||||
/tests/components/peco/ @IceBotYT
|
||||
/homeassistant/components/pegel_online/ @mib1185
|
||||
|
@ -1115,10 +989,12 @@ build.json @home-assistant/supervisor
|
|||
/tests/components/persistent_notification/ @home-assistant/core
|
||||
/homeassistant/components/philips_js/ @elupus
|
||||
/tests/components/philips_js/ @elupus
|
||||
/homeassistant/components/pi_hole/ @shenxn
|
||||
/tests/components/pi_hole/ @shenxn
|
||||
/homeassistant/components/pi_hole/ @johnluetke @shenxn
|
||||
/tests/components/pi_hole/ @johnluetke @shenxn
|
||||
/homeassistant/components/picnic/ @corneyl
|
||||
/tests/components/picnic/ @corneyl
|
||||
/homeassistant/components/pilight/ @trekky12
|
||||
/tests/components/pilight/ @trekky12
|
||||
/homeassistant/components/ping/ @jpbede
|
||||
/tests/components/ping/ @jpbede
|
||||
/homeassistant/components/plaato/ @JohNan
|
||||
|
@ -1148,8 +1024,8 @@ build.json @home-assistant/supervisor
|
|||
/homeassistant/components/proximity/ @mib1185
|
||||
/tests/components/proximity/ @mib1185
|
||||
/homeassistant/components/proxmoxve/ @jhollowe @Corbeno
|
||||
/homeassistant/components/prusalink/ @balloob
|
||||
/tests/components/prusalink/ @balloob
|
||||
/homeassistant/components/prusalink/ @balloob @Skaronator
|
||||
/tests/components/prusalink/ @balloob @Skaronator
|
||||
/homeassistant/components/ps4/ @ktnrg45
|
||||
/tests/components/ps4/ @ktnrg45
|
||||
/homeassistant/components/pure_energie/ @klaasnicolaas
|
||||
|
@ -1166,12 +1042,10 @@ build.json @home-assistant/supervisor
|
|||
/tests/components/pvoutput/ @frenck
|
||||
/homeassistant/components/pvpc_hourly_pricing/ @azogue
|
||||
/tests/components/pvpc_hourly_pricing/ @azogue
|
||||
/homeassistant/components/pyload/ @tr4nt0r
|
||||
/tests/components/pyload/ @tr4nt0r
|
||||
/homeassistant/components/qbittorrent/ @geoffreylagaisse @finder39
|
||||
/tests/components/qbittorrent/ @geoffreylagaisse @finder39
|
||||
/homeassistant/components/qingping/ @bdraco
|
||||
/tests/components/qingping/ @bdraco
|
||||
/homeassistant/components/qingping/ @bdraco @skgsergio
|
||||
/tests/components/qingping/ @bdraco @skgsergio
|
||||
/homeassistant/components/qld_bushfire/ @exxamalte
|
||||
/tests/components/qld_bushfire/ @exxamalte
|
||||
/homeassistant/components/qnap/ @disforw
|
||||
|
@ -1217,6 +1091,7 @@ build.json @home-assistant/supervisor
|
|||
/tests/components/recovery_mode/ @home-assistant/core
|
||||
/homeassistant/components/refoss/ @ashionky
|
||||
/tests/components/refoss/ @ashionky
|
||||
/homeassistant/components/rejseplanen/ @DarkFox
|
||||
/homeassistant/components/remote/ @home-assistant/core
|
||||
/tests/components/remote/ @home-assistant/core
|
||||
/homeassistant/components/renault/ @epenet
|
||||
|
@ -1246,14 +1121,14 @@ build.json @home-assistant/supervisor
|
|||
/tests/components/rituals_perfume_genie/ @milanmeu @frenck
|
||||
/homeassistant/components/rmvtransport/ @cgtobi
|
||||
/tests/components/rmvtransport/ @cgtobi
|
||||
/homeassistant/components/roborock/ @Lash-L
|
||||
/tests/components/roborock/ @Lash-L
|
||||
/homeassistant/components/roborock/ @humbertogontijo @Lash-L
|
||||
/tests/components/roborock/ @humbertogontijo @Lash-L
|
||||
/homeassistant/components/roku/ @ctalkington
|
||||
/tests/components/roku/ @ctalkington
|
||||
/homeassistant/components/romy/ @xeniter
|
||||
/tests/components/romy/ @xeniter
|
||||
/homeassistant/components/roomba/ @pschmitt @cyr-ius @shenxn @Orhideous
|
||||
/tests/components/roomba/ @pschmitt @cyr-ius @shenxn @Orhideous
|
||||
/homeassistant/components/roomba/ @pschmitt @cyr-ius @shenxn @Xitee1
|
||||
/tests/components/roomba/ @pschmitt @cyr-ius @shenxn @Xitee1
|
||||
/homeassistant/components/roon/ @pavoni
|
||||
/tests/components/roon/ @pavoni
|
||||
/homeassistant/components/rpi_power/ @shenxn @swetoast
|
||||
|
@ -1264,21 +1139,17 @@ build.json @home-assistant/supervisor
|
|||
/tests/components/rtsp_to_webrtc/ @allenporter
|
||||
/homeassistant/components/ruckus_unleashed/ @lanrat @ms264556 @gabe565
|
||||
/tests/components/ruckus_unleashed/ @lanrat @ms264556 @gabe565
|
||||
/homeassistant/components/russound_rio/ @noahhusby
|
||||
/tests/components/russound_rio/ @noahhusby
|
||||
/homeassistant/components/ruuvi_gateway/ @akx
|
||||
/tests/components/ruuvi_gateway/ @akx
|
||||
/homeassistant/components/ruuvitag_ble/ @akx
|
||||
/tests/components/ruuvitag_ble/ @akx
|
||||
/homeassistant/components/rympro/ @OnFreund @elad-bar @maorcc
|
||||
/tests/components/rympro/ @OnFreund @elad-bar @maorcc
|
||||
/homeassistant/components/sabnzbd/ @shaiu @jpbede
|
||||
/tests/components/sabnzbd/ @shaiu @jpbede
|
||||
/homeassistant/components/sabnzbd/ @shaiu
|
||||
/tests/components/sabnzbd/ @shaiu
|
||||
/homeassistant/components/saj/ @fredericvl
|
||||
/homeassistant/components/samsungtv/ @chemelli74 @epenet
|
||||
/tests/components/samsungtv/ @chemelli74 @epenet
|
||||
/homeassistant/components/sanix/ @tomaszsluszniak
|
||||
/tests/components/sanix/ @tomaszsluszniak
|
||||
/homeassistant/components/scene/ @home-assistant/core
|
||||
/tests/components/scene/ @home-assistant/core
|
||||
/homeassistant/components/schedule/ @home-assistant/core
|
||||
|
@ -1310,16 +1181,12 @@ build.json @home-assistant/supervisor
|
|||
/tests/components/sensorpro/ @bdraco
|
||||
/homeassistant/components/sensorpush/ @bdraco
|
||||
/tests/components/sensorpush/ @bdraco
|
||||
/homeassistant/components/sensoterra/ @markruys
|
||||
/tests/components/sensoterra/ @markruys
|
||||
/homeassistant/components/sentry/ @dcramer @frenck
|
||||
/tests/components/sentry/ @dcramer @frenck
|
||||
/homeassistant/components/senz/ @milanmeu
|
||||
/tests/components/senz/ @milanmeu
|
||||
/homeassistant/components/serial/ @fabaff
|
||||
/homeassistant/components/seven_segments/ @fabaff
|
||||
/homeassistant/components/seventeentrack/ @shaiu
|
||||
/tests/components/seventeentrack/ @shaiu
|
||||
/homeassistant/components/sfr_box/ @epenet
|
||||
/tests/components/sfr_box/ @epenet
|
||||
/homeassistant/components/sharkiq/ @JeffResc @funkybunch
|
||||
|
@ -1335,8 +1202,6 @@ build.json @home-assistant/supervisor
|
|||
/tests/components/sighthound/ @robmarkcole
|
||||
/homeassistant/components/signal_messenger/ @bbernhard
|
||||
/tests/components/signal_messenger/ @bbernhard
|
||||
/homeassistant/components/simplefin/ @scottg489 @jeeftor
|
||||
/tests/components/simplefin/ @scottg489 @jeeftor
|
||||
/homeassistant/components/simplepush/ @engrbm87
|
||||
/tests/components/simplepush/ @engrbm87
|
||||
/homeassistant/components/simplisafe/ @bachya
|
||||
|
@ -1346,8 +1211,6 @@ build.json @home-assistant/supervisor
|
|||
/tests/components/siren/ @home-assistant/core @raman325
|
||||
/homeassistant/components/sisyphus/ @jkeljo
|
||||
/homeassistant/components/sky_hub/ @rogerselwyn
|
||||
/homeassistant/components/sky_remote/ @dunnmj @saty9
|
||||
/tests/components/sky_remote/ @dunnmj @saty9
|
||||
/homeassistant/components/skybell/ @tkdrob
|
||||
/tests/components/skybell/ @tkdrob
|
||||
/homeassistant/components/slack/ @tkdrob @fletcherau
|
||||
|
@ -1363,27 +1226,23 @@ build.json @home-assistant/supervisor
|
|||
/tests/components/smappee/ @bsmappee
|
||||
/homeassistant/components/smart_meter_texas/ @grahamwetzler
|
||||
/tests/components/smart_meter_texas/ @grahamwetzler
|
||||
/homeassistant/components/smartthings/ @andrewsayre
|
||||
/tests/components/smartthings/ @andrewsayre
|
||||
/homeassistant/components/smarttub/ @mdz
|
||||
/tests/components/smarttub/ @mdz
|
||||
/homeassistant/components/smarty/ @z0mbieprocess
|
||||
/tests/components/smarty/ @z0mbieprocess
/homeassistant/components/smhi/ @gjohansson-ST
/tests/components/smhi/ @gjohansson-ST
/homeassistant/components/smlight/ @tl-sl
/tests/components/smlight/ @tl-sl
/homeassistant/components/sms/ @ocalvo
/tests/components/sms/ @ocalvo
/homeassistant/components/snapcast/ @luar123
/tests/components/snapcast/ @luar123
/homeassistant/components/snmp/ @nmaggioni
/tests/components/snmp/ @nmaggioni
/homeassistant/components/snooz/ @AustinBrunkhorst
/tests/components/snooz/ @AustinBrunkhorst
/homeassistant/components/solaredge/ @frenck @bdraco
/tests/components/solaredge/ @frenck @bdraco
/homeassistant/components/solaredge/ @frenck
/tests/components/solaredge/ @frenck
/homeassistant/components/solaredge_local/ @drobtravels @scheric
/homeassistant/components/solarlog/ @Ernst79 @dontinelli
/tests/components/solarlog/ @Ernst79 @dontinelli
/homeassistant/components/solarlog/ @Ernst79
/tests/components/solarlog/ @Ernst79
/homeassistant/components/solax/ @squishykid
/tests/components/solax/ @squishykid
/homeassistant/components/soma/ @ratsept @sebfortier2288

@ -1392,21 +1251,23 @@ build.json @home-assistant/supervisor

/tests/components/sonarr/ @ctalkington
/homeassistant/components/songpal/ @rytilahti @shenxn
/tests/components/songpal/ @rytilahti @shenxn
/homeassistant/components/sonos/ @jjlawren @peterager
/tests/components/sonos/ @jjlawren @peterager
/homeassistant/components/sonos/ @jjlawren
/tests/components/sonos/ @jjlawren
/homeassistant/components/soundtouch/ @kroimon
/tests/components/soundtouch/ @kroimon
/homeassistant/components/spaceapi/ @fabaff
/tests/components/spaceapi/ @fabaff
/homeassistant/components/speedtestdotnet/ @rohankapoorcom @engrbm87
/tests/components/speedtestdotnet/ @rohankapoorcom @engrbm87
/homeassistant/components/spider/ @peternijssen
/tests/components/spider/ @peternijssen
/homeassistant/components/splunk/ @Bre77
/homeassistant/components/spotify/ @frenck @joostlek
/tests/components/spotify/ @frenck @joostlek
/homeassistant/components/sql/ @gjohansson-ST @dougiteixeira
/tests/components/sql/ @gjohansson-ST @dougiteixeira
/homeassistant/components/squeezebox/ @rajlaud @pssc @peteS-UK
/tests/components/squeezebox/ @rajlaud @pssc @peteS-UK
/homeassistant/components/squeezebox/ @rajlaud
/tests/components/squeezebox/ @rajlaud
/homeassistant/components/srp_energy/ @briglx
/tests/components/srp_energy/ @briglx
/homeassistant/components/starline/ @anonym-tsk

@ -1430,8 +1291,8 @@ build.json @home-assistant/supervisor

/tests/components/stt/ @home-assistant/core
/homeassistant/components/subaru/ @G-Two
/tests/components/subaru/ @G-Two
/homeassistant/components/suez_water/ @ooii @jb101010-2
/tests/components/suez_water/ @ooii @jb101010-2
/homeassistant/components/suez_water/ @ooii
/tests/components/suez_water/ @ooii
/homeassistant/components/sun/ @Swamp-Ig
/tests/components/sun/ @Swamp-Ig
/homeassistant/components/sunweg/ @rokam

@ -1450,10 +1311,10 @@ build.json @home-assistant/supervisor

/tests/components/switchbee/ @jafar-atili
/homeassistant/components/switchbot/ @danielhiversen @RenierM26 @murtas @Eloston @dsypniewski
/tests/components/switchbot/ @danielhiversen @RenierM26 @murtas @Eloston @dsypniewski
/homeassistant/components/switchbot_cloud/ @SeraphicRav @laurence-presland @Gigatrappeur
/tests/components/switchbot_cloud/ @SeraphicRav @laurence-presland @Gigatrappeur
/homeassistant/components/switcher_kis/ @thecode @YogevBokobza
/tests/components/switcher_kis/ @thecode @YogevBokobza
/homeassistant/components/switchbot_cloud/ @SeraphicRav
/tests/components/switchbot_cloud/ @SeraphicRav
/homeassistant/components/switcher_kis/ @thecode
/tests/components/switcher_kis/ @thecode
/homeassistant/components/switchmate/ @danielhiversen @qiz-li
/homeassistant/components/syncthing/ @zhulik
/tests/components/syncthing/ @zhulik

@ -1489,10 +1350,8 @@ build.json @home-assistant/supervisor

/tests/components/tedee/ @patrickhilker @zweckj
/homeassistant/components/tellduslive/ @fredrike
/tests/components/tellduslive/ @fredrike
/homeassistant/components/template/ @PhracturedBlue @home-assistant/core
/tests/components/template/ @PhracturedBlue @home-assistant/core
/homeassistant/components/tesla_fleet/ @Bre77
/tests/components/tesla_fleet/ @Bre77
/homeassistant/components/template/ @PhracturedBlue @tetienne @home-assistant/core
/tests/components/template/ @PhracturedBlue @tetienne @home-assistant/core
/homeassistant/components/tesla_wall_connector/ @einarhauks
/tests/components/tesla_wall_connector/ @einarhauks
/homeassistant/components/teslemetry/ @Bre77

@ -1506,8 +1365,7 @@ build.json @home-assistant/supervisor

/tests/components/thermobeacon/ @bdraco
/homeassistant/components/thermopro/ @bdraco @h3ss
/tests/components/thermopro/ @bdraco @h3ss
/homeassistant/components/thethingsnetwork/ @angelnu
/tests/components/thethingsnetwork/ @angelnu
/homeassistant/components/thethingsnetwork/ @fabaff
/homeassistant/components/thread/ @home-assistant/core
/tests/components/thread/ @home-assistant/core
/homeassistant/components/tibber/ @danielhiversen

@ -1531,10 +1389,8 @@ build.json @home-assistant/supervisor

/tests/components/tomorrowio/ @raman325 @lymanepp
/homeassistant/components/totalconnect/ @austinmroczek
/tests/components/totalconnect/ @austinmroczek
/homeassistant/components/touchline_sl/ @jnsgruk
/tests/components/touchline_sl/ @jnsgruk
/homeassistant/components/tplink/ @rytilahti @bdraco @sdb9696
/tests/components/tplink/ @rytilahti @bdraco @sdb9696
/homeassistant/components/tplink/ @rytilahti @thegardenmonkey @bdraco @sdb9696
/tests/components/tplink/ @rytilahti @thegardenmonkey @bdraco @sdb9696
/homeassistant/components/tplink_omada/ @MarkGodwin
/tests/components/tplink_omada/ @MarkGodwin
/homeassistant/components/traccar/ @ludeeus

@ -1557,8 +1413,6 @@ build.json @home-assistant/supervisor

/tests/components/transmission/ @engrbm87 @JPHutchins
/homeassistant/components/trend/ @jpbede
/tests/components/trend/ @jpbede
/homeassistant/components/triggercmd/ @rvmey
/tests/components/triggercmd/ @rvmey
/homeassistant/components/tts/ @home-assistant/core
/tests/components/tts/ @home-assistant/core
/homeassistant/components/tuya/ @Tuya @zlinoliver @frenck

@ -1575,6 +1429,8 @@ build.json @home-assistant/supervisor

/tests/components/unifi/ @Kane610
/homeassistant/components/unifi_direct/ @tofuSCHNITZEL
/homeassistant/components/unifiled/ @florisvdk
/homeassistant/components/unifiprotect/ @AngellusMortis @bdraco
/tests/components/unifiprotect/ @AngellusMortis @bdraco
/homeassistant/components/upb/ @gwww
/tests/components/upb/ @gwww
/homeassistant/components/upc_connect/ @pvizeli @fabaff

@ -1598,14 +1454,13 @@ build.json @home-assistant/supervisor

/tests/components/v2c/ @dgomes
/homeassistant/components/vacuum/ @home-assistant/core
/tests/components/vacuum/ @home-assistant/core
/homeassistant/components/vallox/ @andre-richter @slovdahl @viiru- @yozik04
/tests/components/vallox/ @andre-richter @slovdahl @viiru- @yozik04
/homeassistant/components/vallox/ @andre-richter @slovdahl @viiru-
/tests/components/vallox/ @andre-richter @slovdahl @viiru-
/homeassistant/components/valve/ @home-assistant/core
/tests/components/valve/ @home-assistant/core
/homeassistant/components/velbus/ @Cereal2nd @brefra
/tests/components/velbus/ @Cereal2nd @brefra
/homeassistant/components/velux/ @Julius2342 @DeerMaximum
/tests/components/velux/ @Julius2342 @DeerMaximum
/homeassistant/components/velux/ @Julius2342
/homeassistant/components/venstar/ @garbled1 @jhollowe
/tests/components/venstar/ @garbled1 @jhollowe
/homeassistant/components/versasense/ @imstevenxyz

@ -1651,20 +1506,14 @@ build.json @home-assistant/supervisor

/tests/components/weather/ @home-assistant/core
/homeassistant/components/weatherflow/ @natekspencer @jeeftor
/tests/components/weatherflow/ @natekspencer @jeeftor
/homeassistant/components/weatherflow_cloud/ @jeeftor
/tests/components/weatherflow_cloud/ @jeeftor
/homeassistant/components/weatherkit/ @tjhorner
/tests/components/weatherkit/ @tjhorner
/homeassistant/components/webhook/ @home-assistant/core
/tests/components/webhook/ @home-assistant/core
/homeassistant/components/webmin/ @autinerd
/tests/components/webmin/ @autinerd
/homeassistant/components/webostv/ @thecode
/tests/components/webostv/ @thecode
/homeassistant/components/websocket_api/ @home-assistant/core
/tests/components/websocket_api/ @home-assistant/core
/homeassistant/components/weheat/ @jesperraemaekers
/tests/components/weheat/ @jesperraemaekers
/homeassistant/components/wemo/ @esev
/tests/components/wemo/ @esev
/homeassistant/components/whirlpool/ @abmantis @mkmer

@ -1682,10 +1531,8 @@ build.json @home-assistant/supervisor

/tests/components/wiz/ @sbidy
/homeassistant/components/wled/ @frenck
/tests/components/wled/ @frenck
/homeassistant/components/wmspro/ @mback2k
/tests/components/wmspro/ @mback2k
/homeassistant/components/wolflink/ @adamkrol93 @mtielen
/tests/components/wolflink/ @adamkrol93 @mtielen
/homeassistant/components/wolflink/ @adamkrol93
/tests/components/wolflink/ @adamkrol93
/homeassistant/components/workday/ @fabaff @gjohansson-ST
/tests/components/workday/ @fabaff @gjohansson-ST
/homeassistant/components/worldclock/ @fabaff

@ -1704,8 +1551,6 @@ build.json @home-assistant/supervisor

/tests/components/xiaomi_miio/ @rytilahti @syssi @starkillerOG
/homeassistant/components/xiaomi_tv/ @simse
/homeassistant/components/xmpp/ @fabaff @flowolf
/homeassistant/components/yale/ @bdraco
/tests/components/yale/ @bdraco
/homeassistant/components/yale_smart_alarm/ @gjohansson-ST
/tests/components/yale_smart_alarm/ @gjohansson-ST
/homeassistant/components/yalexs_ble/ @bdraco

@ -5,7 +5,7 @@

We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socioeconomic status,
identity and expression, level of experience, education, socio-economic status,
nationality, personal appearance, race, religion, or sexual identity
and orientation.

52 Dockerfile

@ -6,38 +6,47 @@ FROM ${BUILD_FROM}

# Synchronize with homeassistant/core.py:async_stop
ENV \
    S6_SERVICES_GRACETIME=240000 \
    UV_SYSTEM_PYTHON=true \
    UV_NO_CACHE=true
    S6_SERVICES_GRACETIME=240000

ARG QEMU_CPU

# Install uv
RUN pip3 install uv==0.5.0

WORKDIR /usr/src

## Setup Home Assistant Core dependencies
COPY requirements.txt homeassistant/
COPY homeassistant/package_constraints.txt homeassistant/homeassistant/
RUN \
    uv pip install \
        --no-build \
    pip3 install \
        --only-binary=:all: \
        -r homeassistant/requirements.txt

COPY requirements_all.txt home_assistant_frontend-* home_assistant_intents-* homeassistant/
RUN \
    if ls homeassistant/home_assistant_*.whl 1> /dev/null 2>&1; then \
        uv pip install homeassistant/home_assistant_*.whl; \
    if ls homeassistant/home_assistant_frontend*.whl 1> /dev/null 2>&1; then \
        pip3 install homeassistant/home_assistant_frontend-*.whl; \
    fi \
    && uv pip install \
        --no-build \
        -r homeassistant/requirements_all.txt
    && if ls homeassistant/home_assistant_intents*.whl 1> /dev/null 2>&1; then \
        pip3 install homeassistant/home_assistant_intents-*.whl; \
    fi \
    && if [ "${BUILD_ARCH}" = "i386" ]; then \
        LD_PRELOAD="/usr/local/lib/libjemalloc.so.2" \
        MALLOC_CONF="background_thread:true,metadata_thp:auto,dirty_decay_ms:20000,muzzy_decay_ms:20000" \
        linux32 pip3 install \
            --only-binary=:all: \
            -r homeassistant/requirements_all.txt; \
    else \
        LD_PRELOAD="/usr/local/lib/libjemalloc.so.2" \
        MALLOC_CONF="background_thread:true,metadata_thp:auto,dirty_decay_ms:20000,muzzy_decay_ms:20000" \
        pip3 install \
            --only-binary=:all: \
            -r homeassistant/requirements_all.txt; \
    fi

## Setup Home Assistant Core
COPY . homeassistant/
RUN \
    uv pip install \
    pip3 install \
        --only-binary=:all: \
        -e ./homeassistant \
    && python3 -m compileall \
        homeassistant/homeassistant

@ -45,19 +54,4 @@ RUN \

# Home Assistant S6-Overlay
COPY rootfs /

# Needs to be redefined inside the FROM statement to be set for RUN commands
ARG BUILD_ARCH
# Get go2rtc binary
RUN \
    case "${BUILD_ARCH}" in \
        "aarch64") go2rtc_suffix='arm64' ;; \
        "armhf") go2rtc_suffix='armv6' ;; \
        "armv7") go2rtc_suffix='arm' ;; \
        *) go2rtc_suffix=${BUILD_ARCH} ;; \
    esac \
    && curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.7/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
    && chmod +x /bin/go2rtc \
    # Verify go2rtc can be executed
    && go2rtc --version

WORKDIR /config

@ -1,4 +1,4 @@

FROM mcr.microsoft.com/devcontainers/python:1-3.12
FROM mcr.microsoft.com/vscode/devcontainers/python:0-3.11

SHELL ["/bin/bash", "-o", "pipefail", "-c"]

@ -22,7 +22,6 @@ RUN \

    libavcodec-dev \
    libavdevice-dev \
    libavutil-dev \
    libgammu-dev \
    libswscale-dev \
    libswresample-dev \
    libavfilter-dev \

@ -35,34 +34,21 @@ RUN \

    && apt-get clean \
    && rm -rf /var/lib/apt/lists/*

# Add go2rtc binary
COPY --from=ghcr.io/alexxit/go2rtc:latest /usr/local/bin/go2rtc /bin/go2rtc

# Install uv
RUN pip3 install uv

WORKDIR /usr/src

# Setup hass-release
RUN git clone --depth 1 https://github.com/home-assistant/hass-release \
    && uv pip install --system -e hass-release/ \
    && chown -R vscode /usr/src/hass-release/data
    && pip3 install -e hass-release/

USER vscode
ENV VIRTUAL_ENV="/home/vscode/.local/ha-venv"
RUN uv venv $VIRTUAL_ENV
ENV PATH="$VIRTUAL_ENV/bin:$PATH"

WORKDIR /tmp
WORKDIR /workspaces

# Install Python dependencies from requirements
COPY requirements.txt ./
COPY homeassistant/package_constraints.txt homeassistant/package_constraints.txt
RUN uv pip install -r requirements.txt
RUN pip3 install -r requirements.txt
COPY requirements_test.txt requirements_test_pre_commit.txt ./
RUN uv pip install -r requirements_test.txt

WORKDIR /workspaces
RUN pip3 install -r requirements_test.txt
RUN rm -rf requirements.txt requirements_test.txt requirements_test_pre_commit.txt homeassistant/

# Set the default shell to bash instead of sh
ENV SHELL /bin/bash

@ -20,14 +20,9 @@ components <https://developers.home-assistant.io/docs/creating_component_index/>

If you run into issues while using Home Assistant or during development
of a component, check the `Home Assistant help section <https://home-assistant.io/help/>`__ of our website for further help and information.

|ohf-logo|

.. |Chat Status| image:: https://img.shields.io/discord/330944238910963714.svg
   :target: https://www.home-assistant.io/join-chat/
.. |screenshot-states| image:: https://raw.githubusercontent.com/home-assistant/core/dev/.github/assets/screenshot-states.png
   :target: https://demo.home-assistant.io
.. |screenshot-integrations| image:: https://raw.githubusercontent.com/home-assistant/core/dev/.github/assets/screenshot-integrations.png
   :target: https://home-assistant.io/integrations/
.. |ohf-logo| image:: https://www.openhomefoundation.org/badges/home-assistant.png
   :alt: Home Assistant - A project from the Open Home Foundation
   :target: https://www.openhomefoundation.org/
   :target: https://home-assistant.io/integrations/

10 build.yaml

@ -1,10 +1,10 @@

image: ghcr.io/home-assistant/{arch}-homeassistant
build_from:
  aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2024.11.0
  armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2024.11.0
  armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2024.11.0
  amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2024.11.0
  i386: ghcr.io/home-assistant/i386-homeassistant-base:2024.11.0
  aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2024.02.0
  armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2024.02.0
  armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2024.02.0
  amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2024.02.0
  i386: ghcr.io/home-assistant/i386-homeassistant-base:2024.02.0
codenotary:
  signer: notary@home-assistant.io
  base_image: notary@home-assistant.io

@ -4,7 +4,7 @@ coverage:

  status:
    project:
      default:
        target: auto
        target: 90
        threshold: 0.09
      required:
        target: auto

@ -1,15 +1,12 @@

"""Start Home Assistant."""

from __future__ import annotations

import argparse
from contextlib import suppress
import faulthandler
import os
import sys
import threading

from .backup_restore import restore_backup
from .const import REQUIRED_PYTHON_VER, RESTART_EXIT_CODE, __version__

FAULT_LOG_FILENAME = "home-assistant.log.fault"

@ -148,7 +145,9 @@ def get_arguments() -> argparse.Namespace:

        help="Skips validation of operating system",
    )

    return parser.parse_args()
    arguments = parser.parse_args()

    return arguments


def check_threads() -> None:

@ -183,9 +182,6 @@ def main() -> int:

        return scripts.run(args.script)

    config_dir = os.path.abspath(os.path.join(os.getcwd(), args.config))
    if restore_backup(config_dir):
        return RESTART_EXIT_CODE

    ensure_config_path(config_dir)

    # pylint: disable-next=import-outside-toplevel

@ -213,10 +209,8 @@ def main() -> int:

    exit_code = runner.run(runtime_conf)
    faulthandler.disable()

    # It's possible for the fault file to disappear, so suppress obvious errors
    with suppress(FileNotFoundError):
        if os.path.getsize(fault_file_name) == 0:
            os.remove(fault_file_name)
    if os.path.getsize(fault_file_name) == 0:
        os.remove(fault_file_name)

    check_threads()
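
The hunk above wraps the fault-log cleanup in contextlib.suppress so a log file that disappears between the size check and the removal no longer raises. A minimal standalone sketch of that pattern; the path argument here is illustrative, not the real FAULT_LOG_FILENAME handling:

import os
from contextlib import suppress

def remove_empty_fault_file(path: str = "example.log.fault") -> None:
    """Delete the fault log if it exists and is empty.

    The file may vanish between the size check and the removal,
    so both calls sit inside a single suppress() block.
    """
    with suppress(FileNotFoundError):
        if os.path.getsize(path) == 0:
            os.remove(path)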
|
|
@ -1,5 +1,4 @@
|
|||
"""Provide an authentication layer for Home Assistant."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
|
@ -12,6 +11,7 @@ from typing import Any, cast
|
|||
|
||||
import jwt
|
||||
|
||||
from homeassistant import data_entry_flow
|
||||
from homeassistant.core import (
|
||||
CALLBACK_TYPE,
|
||||
HassJob,
|
||||
|
@ -19,24 +19,22 @@ from homeassistant.core import (
|
|||
HomeAssistant,
|
||||
callback,
|
||||
)
|
||||
from homeassistant.data_entry_flow import FlowHandler, FlowManager, FlowResultType
|
||||
from homeassistant.data_entry_flow import FlowResult
|
||||
from homeassistant.helpers.event import async_track_point_in_utc_time
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from . import auth_store, jwt_wrapper, models
|
||||
from .const import ACCESS_TOKEN_EXPIRATION, GROUP_ID_ADMIN, REFRESH_TOKEN_EXPIRATION
|
||||
from .mfa_modules import MultiFactorAuthModule, auth_mfa_module_from_config
|
||||
from .models import AuthFlowContext, AuthFlowResult
|
||||
from .providers import AuthProvider, LoginFlow, auth_provider_from_config
|
||||
from .providers.homeassistant import HassAuthProvider
|
||||
|
||||
EVENT_USER_ADDED = "user_added"
|
||||
EVENT_USER_UPDATED = "user_updated"
|
||||
EVENT_USER_REMOVED = "user_removed"
|
||||
|
||||
type _MfaModuleDict = dict[str, MultiFactorAuthModule]
|
||||
type _ProviderKey = tuple[str, str | None]
|
||||
type _ProviderDict = dict[_ProviderKey, AuthProvider]
|
||||
_MfaModuleDict = dict[str, MultiFactorAuthModule]
|
||||
_ProviderKey = tuple[str, str | None]
|
||||
_ProviderDict = dict[_ProviderKey, AuthProvider]
|
||||
|
||||
|
||||
class InvalidAuthError(Exception):
|
||||
|
@ -54,7 +52,7 @@ async def auth_manager_from_config(
|
|||
) -> AuthManager:
|
||||
"""Initialize an auth manager from config.
|
||||
|
||||
CORE_CONFIG_SCHEMA will make sure no duplicated auth providers or
|
||||
CORE_CONFIG_SCHEMA will make sure do duplicated auth providers or
|
||||
mfa modules exist in configs.
|
||||
"""
|
||||
store = auth_store.AuthStore(hass)
|
||||
|
@ -74,13 +72,6 @@ async def auth_manager_from_config(
|
|||
key = (provider.type, provider.id)
|
||||
provider_hash[key] = provider
|
||||
|
||||
if isinstance(provider, HassAuthProvider):
|
||||
# Can be removed in 2026.7 with the legacy mode of homeassistant auth provider
|
||||
# We need to initialize the provider to create the repair if needed as otherwise
|
||||
# the provider will be initialized on first use, which could be rare as users
|
||||
# don't frequently change auth settings
|
||||
await provider.async_initialize()
|
||||
|
||||
if module_configs:
|
||||
modules = await asyncio.gather(
|
||||
*(auth_mfa_module_from_config(hass, config) for config in module_configs)
|
||||
|
@ -93,17 +84,13 @@ async def auth_manager_from_config(
|
|||
module_hash[module.id] = module
|
||||
|
||||
manager = AuthManager(hass, store, provider_hash, module_hash)
|
||||
await manager.async_setup()
|
||||
manager.async_setup()
|
||||
return manager
|
||||
|
||||
|
||||
class AuthManagerFlowManager(
|
||||
FlowManager[AuthFlowContext, AuthFlowResult, tuple[str, str]]
|
||||
):
|
||||
class AuthManagerFlowManager(data_entry_flow.FlowManager):
|
||||
"""Manage authentication flows."""
|
||||
|
||||
_flow_result = AuthFlowResult
|
||||
|
||||
def __init__(self, hass: HomeAssistant, auth_manager: AuthManager) -> None:
|
||||
"""Init auth manager flows."""
|
||||
super().__init__(hass)
|
||||
|
@ -111,11 +98,11 @@ class AuthManagerFlowManager(
|
|||
|
||||
async def async_create_flow(
|
||||
self,
|
||||
handler_key: tuple[str, str],
|
||||
handler_key: str,
|
||||
*,
|
||||
context: AuthFlowContext | None = None,
|
||||
context: dict[str, Any] | None = None,
|
||||
data: dict[str, Any] | None = None,
|
||||
) -> LoginFlow:
|
||||
) -> data_entry_flow.FlowHandler:
|
||||
"""Create a login flow."""
|
||||
auth_provider = self.auth_manager.get_auth_provider(*handler_key)
|
||||
if not auth_provider:
|
||||
|
@ -123,18 +110,12 @@ class AuthManagerFlowManager(
|
|||
return await auth_provider.async_login_flow(context)
|
||||
|
||||
async def async_finish_flow(
|
||||
self,
|
||||
flow: FlowHandler[AuthFlowContext, AuthFlowResult, tuple[str, str]],
|
||||
result: AuthFlowResult,
|
||||
) -> AuthFlowResult:
|
||||
"""Return a user as result of login flow.
|
||||
|
||||
This method is called when a flow step returns FlowResultType.ABORT or
|
||||
FlowResultType.CREATE_ENTRY.
|
||||
"""
|
||||
self, flow: data_entry_flow.FlowHandler, result: FlowResult
|
||||
) -> FlowResult:
|
||||
"""Return a user as result of login flow."""
|
||||
flow = cast(LoginFlow, flow)
|
||||
|
||||
if result["type"] != FlowResultType.CREATE_ENTRY:
|
||||
if result["type"] != data_entry_flow.FlowResultType.CREATE_ENTRY:
|
||||
return result
|
||||
|
||||
# we got final result
|
||||
|
@ -193,7 +174,8 @@ class AuthManager:
|
|||
self._async_remove_expired_refresh_tokens, job_type=HassJobType.Callback
|
||||
)
|
||||
|
||||
async def async_setup(self) -> None:
|
||||
@callback
|
||||
def async_setup(self) -> None:
|
||||
"""Set up the auth manager."""
|
||||
hass = self.hass
|
||||
hass.async_add_shutdown_job(
|
||||
|
@ -367,15 +349,15 @@ class AuthManager:
|
|||
local_only: bool | None = None,
|
||||
) -> None:
|
||||
"""Update a user."""
|
||||
kwargs: dict[str, Any] = {
|
||||
attr_name: value
|
||||
for attr_name, value in (
|
||||
("name", name),
|
||||
("group_ids", group_ids),
|
||||
("local_only", local_only),
|
||||
)
|
||||
if value is not None
|
||||
}
|
||||
kwargs: dict[str, Any] = {}
|
||||
|
||||
for attr_name, value in (
|
||||
("name", name),
|
||||
("group_ids", group_ids),
|
||||
("local_only", local_only),
|
||||
):
|
||||
if value is not None:
|
||||
kwargs[attr_name] = value
|
||||
await self._store.async_update_user(user, **kwargs)
|
||||
|
||||
if is_active is not None:
|
||||
|
@ -386,13 +368,6 @@ class AuthManager:
|
|||
|
||||
self.hass.bus.async_fire(EVENT_USER_UPDATED, {"user_id": user.id})
|
||||
|
||||
@callback
|
||||
def async_update_user_credentials_data(
|
||||
self, credentials: models.Credentials, data: dict[str, Any]
|
||||
) -> None:
|
||||
"""Update credentials data."""
|
||||
self._store.async_update_user_credentials_data(credentials, data=data)
|
||||
|
||||
async def async_activate_user(self, user: models.User) -> None:
|
||||
"""Activate a user."""
|
||||
await self._store.async_activate_user(user)
|
||||
|
@ -535,13 +510,6 @@ class AuthManager:
|
|||
for revoke_callback in callbacks:
|
||||
revoke_callback()
|
||||
|
||||
@callback
|
||||
def async_set_expiry(
|
||||
self, refresh_token: models.RefreshToken, *, enable_expiry: bool
|
||||
) -> None:
|
||||
"""Enable or disable expiry of a refresh token."""
|
||||
self._store.async_set_expiry(refresh_token, enable_expiry=enable_expiry)
|
||||
|
||||
@callback
|
||||
def _async_remove_expired_refresh_tokens(self, _: datetime | None = None) -> None:
|
||||
"""Remove expired refresh tokens."""
|
||||
|
|
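
The async_update_user hunks in this file replace a loop that conditionally copies name, group_ids and local_only into kwargs with a dict comprehension that skips None values. A small self-contained sketch of that pattern, with simplified types:

from typing import Any

def build_update_kwargs(
    name: str | None = None,
    group_ids: list[str] | None = None,
    local_only: bool | None = None,
) -> dict[str, Any]:
    """Collect only the explicitly provided fields, skipping None."""
    return {
        attr_name: value
        for attr_name, value in (
            ("name", name),
            ("group_ids", group_ids),
            ("local_only", local_only),
        )
        if value is not None
    }

# build_update_kwargs(name="Paulus") -> {"name": "Paulus"}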
|
@ -1,5 +1,4 @@
|
|||
"""Storage for auth models."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
|
@ -31,17 +30,6 @@ GROUP_NAME_ADMIN = "Administrators"
|
|||
GROUP_NAME_USER = "Users"
|
||||
GROUP_NAME_READ_ONLY = "Read Only"
|
||||
|
||||
# We always save the auth store after we load it since
|
||||
# we may migrate data and do not want to have to do it again
|
||||
# but we don't want to do it during startup so we schedule
|
||||
# the first save 5 minutes out knowing something else may
|
||||
# want to save the auth store before then, and since Storage
|
||||
# will honor the lower of the two delays, it will save it
|
||||
# faster if something else saves it.
|
||||
INITIAL_LOAD_SAVE_DELAY = 300
|
||||
|
||||
DEFAULT_SAVE_DELAY = 1
|
||||
|
||||
|
||||
class AuthStore:
|
||||
"""Stores authentication info.
|
||||
|
@ -62,7 +50,6 @@ class AuthStore:
|
|||
self._store = Store[dict[str, list[dict[str, Any]]]](
|
||||
hass, STORAGE_VERSION, STORAGE_KEY, private=True, atomic_writes=True
|
||||
)
|
||||
self._token_id_to_user_id: dict[str, str] = {}
|
||||
|
||||
async def async_get_groups(self) -> list[models.Group]:
|
||||
"""Retrieve all users."""
|
||||
|
@ -105,18 +92,14 @@ class AuthStore:
|
|||
"perm_lookup": self._perm_lookup,
|
||||
}
|
||||
|
||||
kwargs.update(
|
||||
{
|
||||
attr_name: value
|
||||
for attr_name, value in (
|
||||
("is_owner", is_owner),
|
||||
("is_active", is_active),
|
||||
("local_only", local_only),
|
||||
("system_generated", system_generated),
|
||||
)
|
||||
if value is not None
|
||||
}
|
||||
)
|
||||
for attr_name, value in (
|
||||
("is_owner", is_owner),
|
||||
("is_active", is_active),
|
||||
("local_only", local_only),
|
||||
("system_generated", system_generated),
|
||||
):
|
||||
if value is not None:
|
||||
kwargs[attr_name] = value
|
||||
|
||||
new_user = models.User(**kwargs)
|
||||
|
||||
|
@ -140,10 +123,7 @@ class AuthStore:
|
|||
|
||||
async def async_remove_user(self, user: models.User) -> None:
|
||||
"""Remove a user."""
|
||||
user = self._users.pop(user.id)
|
||||
for refresh_token_id in user.refresh_tokens:
|
||||
del self._token_id_to_user_id[refresh_token_id]
|
||||
user.refresh_tokens.clear()
|
||||
self._users.pop(user.id)
|
||||
self._async_schedule_save()
|
||||
|
||||
async def async_update_user(
|
||||
|
@ -226,9 +206,7 @@ class AuthStore:
|
|||
kwargs["client_icon"] = client_icon
|
||||
|
||||
refresh_token = models.RefreshToken(**kwargs)
|
||||
token_id = refresh_token.id
|
||||
user.refresh_tokens[token_id] = refresh_token
|
||||
self._token_id_to_user_id[token_id] = user.id
|
||||
user.refresh_tokens[refresh_token.id] = refresh_token
|
||||
|
||||
self._async_schedule_save()
|
||||
return refresh_token
|
||||
|
@ -236,17 +214,19 @@ class AuthStore:
|
|||
@callback
|
||||
def async_remove_refresh_token(self, refresh_token: models.RefreshToken) -> None:
|
||||
"""Remove a refresh token."""
|
||||
refresh_token_id = refresh_token.id
|
||||
if user_id := self._token_id_to_user_id.get(refresh_token_id):
|
||||
del self._users[user_id].refresh_tokens[refresh_token_id]
|
||||
del self._token_id_to_user_id[refresh_token_id]
|
||||
self._async_schedule_save()
|
||||
for user in self._users.values():
|
||||
if user.refresh_tokens.pop(refresh_token.id, None):
|
||||
self._async_schedule_save()
|
||||
break
|
||||
|
||||
@callback
|
||||
def async_get_refresh_token(self, token_id: str) -> models.RefreshToken | None:
|
||||
"""Get refresh token by id."""
|
||||
if user_id := self._token_id_to_user_id.get(token_id):
|
||||
return self._users[user_id].refresh_tokens.get(token_id)
|
||||
for user in self._users.values():
|
||||
refresh_token = user.refresh_tokens.get(token_id)
|
||||
if refresh_token is not None:
|
||||
return refresh_token
|
||||
|
||||
return None
|
||||
|
||||
@callback
|
||||
|
@ -285,29 +265,6 @@ class AuthStore:
|
|||
)
|
||||
self._async_schedule_save()
|
||||
|
||||
@callback
|
||||
def async_set_expiry(
|
||||
self, refresh_token: models.RefreshToken, *, enable_expiry: bool
|
||||
) -> None:
|
||||
"""Enable or disable expiry of a refresh token."""
|
||||
if enable_expiry:
|
||||
if refresh_token.expire_at is None:
|
||||
refresh_token.expire_at = (
|
||||
refresh_token.last_used_at or dt_util.utcnow()
|
||||
).timestamp() + REFRESH_TOKEN_EXPIRATION
|
||||
self._async_schedule_save()
|
||||
else:
|
||||
refresh_token.expire_at = None
|
||||
self._async_schedule_save()
|
||||
|
||||
@callback
|
||||
def async_update_user_credentials_data(
|
||||
self, credentials: models.Credentials, data: dict[str, Any]
|
||||
) -> None:
|
||||
"""Update credentials data."""
|
||||
credentials.data = data
|
||||
self._async_schedule_save()
|
||||
|
||||
async def async_load(self) -> None: # noqa: C901
|
||||
"""Load the users."""
|
||||
if self._loaded:
|
||||
|
@ -321,6 +278,8 @@ class AuthStore:
|
|||
perm_lookup = PermissionLookup(ent_reg, dev_reg)
|
||||
self._perm_lookup = perm_lookup
|
||||
|
||||
now_ts = dt_util.utcnow().timestamp()
|
||||
|
||||
if data is None or not isinstance(data, dict):
|
||||
self._set_defaults()
|
||||
return
|
||||
|
@ -474,6 +433,14 @@ class AuthStore:
|
|||
else:
|
||||
last_used_at = None
|
||||
|
||||
if (
|
||||
expire_at := rt_dict.get("expire_at")
|
||||
) is None and token_type == models.TOKEN_TYPE_NORMAL:
|
||||
if last_used_at:
|
||||
expire_at = last_used_at.timestamp() + REFRESH_TOKEN_EXPIRATION
|
||||
else:
|
||||
expire_at = now_ts + REFRESH_TOKEN_EXPIRATION
|
||||
|
||||
token = models.RefreshToken(
|
||||
id=rt_dict["id"],
|
||||
user=users[rt_dict["user_id"]],
|
||||
|
@ -490,7 +457,7 @@ class AuthStore:
|
|||
jwt_key=rt_dict["jwt_key"],
|
||||
last_used_at=last_used_at,
|
||||
last_used_ip=rt_dict.get("last_used_ip"),
|
||||
expire_at=rt_dict.get("expire_at"),
|
||||
expire_at=expire_at,
|
||||
version=rt_dict.get("version"),
|
||||
)
|
||||
if "credential_id" in rt_dict:
|
||||
|
@ -499,22 +466,13 @@ class AuthStore:
|
|||
|
||||
self._groups = groups
|
||||
self._users = users
|
||||
self._build_token_id_to_user_id()
|
||||
self._async_schedule_save(INITIAL_LOAD_SAVE_DELAY)
|
||||
|
||||
self._async_schedule_save()
|
||||
|
||||
@callback
|
||||
def _build_token_id_to_user_id(self) -> None:
|
||||
"""Build a map of token id to user id."""
|
||||
self._token_id_to_user_id = {
|
||||
token_id: user_id
|
||||
for user_id, user in self._users.items()
|
||||
for token_id in user.refresh_tokens
|
||||
}
|
||||
|
||||
@callback
|
||||
def _async_schedule_save(self, delay: float = DEFAULT_SAVE_DELAY) -> None:
|
||||
def _async_schedule_save(self) -> None:
|
||||
"""Save users."""
|
||||
self._store.async_delay_save(self._data_to_save, delay)
|
||||
self._store.async_delay_save(self._data_to_save, 1)
|
||||
|
||||
@callback
|
||||
def _data_to_save(self) -> dict[str, list[dict[str, Any]]]:
|
||||
|
@ -604,7 +562,6 @@ class AuthStore:
|
|||
read_only_group = _system_read_only_group()
|
||||
groups[read_only_group.id] = read_only_group
|
||||
self._groups = groups
|
||||
self._build_token_id_to_user_id()
|
||||
|
||||
|
||||
def _system_admin_group() -> models.Group:
|
||||
|
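
The auth store hunks above introduce a token_id -> user_id index so refresh-token lookups and removals no longer scan every user. A reduced sketch of the idea, with plain dicts standing in for the real models.RefreshToken objects and without the delayed-save scheduling:

class TokenIndex:
    """Toy illustration of the token-id -> user-id index."""

    def __init__(self) -> None:
        # user_id -> {token_id: token}
        self.users: dict[str, dict[str, dict]] = {}
        # token_id -> user_id, kept in sync with the per-user dicts
        self.token_id_to_user_id: dict[str, str] = {}

    def add_token(self, user_id: str, token_id: str, token: dict) -> None:
        self.users.setdefault(user_id, {})[token_id] = token
        self.token_id_to_user_id[token_id] = user_id

    def get_token(self, token_id: str) -> dict | None:
        # O(1) lookup instead of iterating every user's tokens
        if user_id := self.token_id_to_user_id.get(token_id):
            return self.users[user_id].get(token_id)
        return None

    def remove_token(self, token_id: str) -> None:
        if user_id := self.token_id_to_user_id.pop(token_id, None):
            self.users[user_id].pop(token_id, None)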
|
|
@ -1,5 +1,4 @@
|
|||
"""Constants for the auth module."""
|
||||
|
||||
from datetime import timedelta
|
||||
|
||||
ACCESS_TOKEN_EXPIRATION = timedelta(minutes=30)
|
||||
|
|
|
@ -4,7 +4,6 @@ Since we decode the same tokens over and over again
|
|||
we can cache the result of the decode of valid tokens
|
||||
to speed up the process.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
|
@ -78,7 +77,7 @@ class _PyJWTWithVerify(PyJWT):
|
|||
key: str,
|
||||
algorithms: list[str],
|
||||
issuer: str | None = None,
|
||||
leeway: float | timedelta = 0,
|
||||
leeway: int | float | timedelta = 0,
|
||||
options: dict[str, Any] | None = None,
|
||||
) -> dict[str, Any]:
|
||||
"""Verify a JWT's signature and claims."""
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
"""Pluggable auth modules for Home Assistant."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import importlib
|
||||
import logging
|
||||
import types
|
||||
from typing import Any
|
||||
|
@ -14,9 +14,7 @@ from homeassistant.const import CONF_ID, CONF_NAME, CONF_TYPE
|
|||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.data_entry_flow import FlowResult
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.importlib import async_import_module
|
||||
from homeassistant.util.decorator import Registry
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
|
||||
MULTI_FACTOR_AUTH_MODULES: Registry[str, type[MultiFactorAuthModule]] = Registry()
|
||||
|
||||
|
@ -30,7 +28,7 @@ MULTI_FACTOR_AUTH_MODULE_SCHEMA = vol.Schema(
|
|||
extra=vol.ALLOW_EXTRA,
|
||||
)
|
||||
|
||||
DATA_REQS: HassKey[set[str]] = HassKey("mfa_auth_module_reqs_processed")
|
||||
DATA_REQS = "mfa_auth_module_reqs_processed"
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
@ -150,7 +148,7 @@ async def _load_mfa_module(hass: HomeAssistant, module_name: str) -> types.Modul
|
|||
module_path = f"homeassistant.auth.mfa_modules.{module_name}"
|
||||
|
||||
try:
|
||||
module = await async_import_module(hass, module_path)
|
||||
module = importlib.import_module(module_path)
|
||||
except ImportError as err:
|
||||
_LOGGER.error("Unable to load mfa module %s: %s", module_name, err)
|
||||
raise HomeAssistantError(
|
||||
|
|
|
@ -1,5 +1,4 @@
|
|||
"""Example auth module."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
|
|
@ -2,7 +2,6 @@
|
|||
|
||||
Sending HOTP through notify service
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
|
@ -88,7 +87,7 @@ class NotifySetting:
|
|||
target: str | None = attr.ib(default=None)
|
||||
|
||||
|
||||
type _UsersDict = dict[str, NotifySetting]
|
||||
_UsersDict = dict[str, NotifySetting]
|
||||
|
||||
|
||||
@MULTI_FACTOR_AUTH_MODULES.register("notify")
|
||||
|
|
|
@ -1,5 +1,4 @@
|
|||
"""Time-based One Time Password auth module."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
|
|
|
@ -1,41 +1,32 @@
|
|||
"""Auth models."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
from ipaddress import IPv4Address, IPv6Address
|
||||
import secrets
|
||||
from typing import Any, NamedTuple
|
||||
from typing import TYPE_CHECKING, Any, NamedTuple
|
||||
import uuid
|
||||
|
||||
import attr
|
||||
from attr import Attribute
|
||||
from attr.setters import validate
|
||||
from propcache import cached_property
|
||||
|
||||
from homeassistant.const import __version__
|
||||
from homeassistant.data_entry_flow import FlowContext, FlowResult
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from . import permissions as perm_mdl
|
||||
from .const import GROUP_ID_ADMIN
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from functools import cached_property
|
||||
else:
|
||||
from homeassistant.backports.functools import cached_property
|
||||
|
||||
|
||||
TOKEN_TYPE_NORMAL = "normal"
|
||||
TOKEN_TYPE_SYSTEM = "system"
|
||||
TOKEN_TYPE_LONG_LIVED_ACCESS_TOKEN = "long_lived_access_token"
|
||||
|
||||
|
||||
class AuthFlowContext(FlowContext, total=False):
|
||||
"""Typed context dict for auth flow."""
|
||||
|
||||
credential_only: bool
|
||||
ip_address: IPv4Address | IPv6Address
|
||||
redirect_uri: str
|
||||
|
||||
|
||||
AuthFlowResult = FlowResult[AuthFlowContext, tuple[str, str]]
|
||||
|
||||
|
||||
@attr.s(slots=True)
|
||||
class Group:
|
||||
"""A group."""
|
||||
|
@ -96,7 +87,11 @@ class User:
|
|||
def invalidate_cache(self) -> None:
|
||||
"""Invalidate permission and is_admin cache."""
|
||||
for attr_to_invalidate in ("permissions", "is_admin"):
|
||||
self.__dict__.pop(attr_to_invalidate, None)
|
||||
# try is must more efficient than suppress
|
||||
try: # noqa: SIM105
|
||||
delattr(self, attr_to_invalidate)
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
|
||||
@attr.s(slots=True)
|
||||
|
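
The invalidate_cache hunk above switches from delattr inside a try block to popping the cached values out of the instance __dict__. A small sketch of why that works for cached_property; functools.cached_property is used here for illustration, while the new code imports it from propcache, which behaves the same for this purpose:

from functools import cached_property

class Account:
    """Tiny example of invalidating cached_property values.

    cached_property stores its result in the instance __dict__, so
    popping the key (present or not) is enough to force a recompute.
    """

    def __init__(self, group_ids: set[str]) -> None:
        self.group_ids = group_ids

    @cached_property
    def is_admin(self) -> bool:
        return "admin" in self.group_ids

    def invalidate_cache(self) -> None:
        for name in ("is_admin",):
            self.__dict__.pop(name, None)

acct = Account({"users"})
assert acct.is_admin is False
acct.group_ids.add("admin")
acct.invalidate_cache()
assert acct.is_admin is True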
|
|
@ -1,8 +1,8 @@
|
|||
"""Permissions for Home Assistant."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from typing import Any
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
|
@ -63,7 +63,7 @@ class PolicyPermissions(AbstractPermissions):
|
|||
"""Return a function that can test entity access."""
|
||||
return compile_entities(self._policy.get(CAT_ENTITIES), self._perm_lookup)
|
||||
|
||||
def __eq__(self, other: object) -> bool:
|
||||
def __eq__(self, other: Any) -> bool:
|
||||
"""Equals check."""
|
||||
return isinstance(other, PolicyPermissions) and other._policy == self._policy
|
||||
|
||||
|
|
|
@ -1,5 +1,4 @@
|
|||
"""Permission constants."""
|
||||
|
||||
CAT_ENTITIES = "entities"
|
||||
CAT_CONFIG_ENTRIES = "config_entries"
|
||||
SUBCAT_ALL = "all"
|
||||
|
|
|
@ -1,5 +1,4 @@
|
|||
"""Entity permissions."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections import OrderedDict
|
||||
|
|
|
@ -1,8 +1,7 @@
|
|||
"""Permission for events."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any, Final
|
||||
from typing import Final
|
||||
|
||||
from homeassistant.const import (
|
||||
EVENT_COMPONENT_LOADED,
|
||||
|
@ -18,17 +17,13 @@ from homeassistant.const import (
|
|||
EVENT_THEMES_UPDATED,
|
||||
)
|
||||
from homeassistant.helpers.area_registry import EVENT_AREA_REGISTRY_UPDATED
|
||||
from homeassistant.helpers.category_registry import EVENT_CATEGORY_REGISTRY_UPDATED
|
||||
from homeassistant.helpers.device_registry import EVENT_DEVICE_REGISTRY_UPDATED
|
||||
from homeassistant.helpers.entity_registry import EVENT_ENTITY_REGISTRY_UPDATED
|
||||
from homeassistant.helpers.floor_registry import EVENT_FLOOR_REGISTRY_UPDATED
|
||||
from homeassistant.helpers.issue_registry import EVENT_REPAIRS_ISSUE_REGISTRY_UPDATED
|
||||
from homeassistant.helpers.label_registry import EVENT_LABEL_REGISTRY_UPDATED
|
||||
from homeassistant.util.event_type import EventType
|
||||
|
||||
# These are events that do not contain any sensitive data
|
||||
# Except for state_changed, which is handled accordingly.
|
||||
SUBSCRIBE_ALLOWLIST: Final[set[EventType[Any] | str]] = {
|
||||
SUBSCRIBE_ALLOWLIST: Final[set[str]] = {
|
||||
EVENT_AREA_REGISTRY_UPDATED,
|
||||
EVENT_COMPONENT_LOADED,
|
||||
EVENT_CORE_CONFIG_UPDATE,
|
||||
|
@ -44,7 +39,4 @@ SUBSCRIBE_ALLOWLIST: Final[set[EventType[Any] | str]] = {
|
|||
EVENT_SHOPPING_LIST_UPDATED,
|
||||
EVENT_STATE_CHANGED,
|
||||
EVENT_THEMES_UPDATED,
|
||||
EVENT_LABEL_REGISTRY_UPDATED,
|
||||
EVENT_CATEGORY_REGISTRY_UPDATED,
|
||||
EVENT_FLOOR_REGISTRY_UPDATED,
|
||||
}
|
||||
|
|
|
@ -1,5 +1,4 @@
|
|||
"""Merging of policies."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import cast
|
||||
|
@ -58,7 +57,10 @@ def _merge_policies(sources: list[CategoryType]) -> CategoryType:
|
|||
continue
|
||||
seen.add(key)
|
||||
|
||||
key_sources = [src.get(key) for src in sources if isinstance(src, dict)]
|
||||
key_sources = []
|
||||
for src in sources:
|
||||
if isinstance(src, dict):
|
||||
key_sources.append(src.get(key))
|
||||
|
||||
policy[key] = _merge_policies(key_sources)
|
||||
|
||||
|
|
|
@ -1,5 +1,4 @@
|
|||
"""Models for permissions."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
|
|
@ -1,5 +1,4 @@
|
|||
"""System policies."""
|
||||
|
||||
from .const import CAT_ENTITIES, POLICY_READ, SUBCAT_ALL
|
||||
|
||||
ADMIN_POLICY = {CAT_ENTITIES: True}
|
||||
|
|
|
@ -1,20 +1,19 @@
|
|||
"""Common code for permissions."""
|
||||
|
||||
from collections.abc import Mapping
|
||||
|
||||
# MyPy doesn't support recursion yet. So writing it out as far as we need.
|
||||
|
||||
type ValueType = (
|
||||
ValueType = (
|
||||
# Example: entities.all = { read: true, control: true }
|
||||
Mapping[str, bool] | bool | None
|
||||
)
|
||||
|
||||
# Example: entities.domains = { light: … }
|
||||
type SubCategoryDict = Mapping[str, ValueType]
|
||||
SubCategoryDict = Mapping[str, ValueType]
|
||||
|
||||
type SubCategoryType = SubCategoryDict | bool | None
|
||||
SubCategoryType = SubCategoryDict | bool | None
|
||||
|
||||
type CategoryType = (
|
||||
CategoryType = (
|
||||
# Example: entities.domains
|
||||
Mapping[str, SubCategoryType]
|
||||
# Example: entities.all
|
||||
|
@ -24,4 +23,4 @@ type CategoryType = (
|
|||
)
|
||||
|
||||
# Example: { entities: … }
|
||||
type PolicyType = Mapping[str, CategoryType]
|
||||
PolicyType = Mapping[str, CategoryType]
|
||||
|
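
The types.py hunks above convert plain alias assignments to PEP 695 `type` statements, which require Python 3.12 and are evaluated lazily. A short illustration of the two spellings; the CategoryType union below is condensed relative to the real file:

from collections.abc import Mapping

# Pre-3.12 style: a plain assignment, evaluated eagerly at import time.
ValueTypeOld = Mapping[str, bool] | bool | None

# Python 3.12+ style (PEP 695): lazily evaluated, and recognised as a
# type alias by type checkers even when it refers to names defined later.
type ValueType = Mapping[str, bool] | bool | None
type SubCategoryDict = Mapping[str, ValueType]
type CategoryType = Mapping[str, SubCategoryDict | bool | None] | bool | None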
|
|
@ -1,5 +1,4 @@
|
|||
"""Helpers to deal with permissions."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
|
@ -10,8 +9,8 @@ from .const import SUBCAT_ALL
|
|||
from .models import PermissionLookup
|
||||
from .types import CategoryType, SubCategoryDict, ValueType
|
||||
|
||||
type LookupFunc = Callable[[PermissionLookup, SubCategoryDict, str], ValueType | None]
|
||||
type SubCatLookupType = dict[str, LookupFunc]
|
||||
LookupFunc = Callable[[PermissionLookup, SubCategoryDict, str], ValueType | None]
|
||||
SubCatLookupType = dict[str, LookupFunc]
|
||||
|
||||
|
||||
def lookup_all(
|
||||
|
|
|
@ -1,8 +1,8 @@
|
|||
"""Auth providers for Home Assistant."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Mapping
|
||||
import importlib
|
||||
import logging
|
||||
import types
|
||||
from typing import Any
|
||||
|
@ -10,29 +10,20 @@ from typing import Any
|
|||
import voluptuous as vol
|
||||
from voluptuous.humanize import humanize_error
|
||||
|
||||
from homeassistant import requirements
|
||||
from homeassistant import data_entry_flow, requirements
|
||||
from homeassistant.const import CONF_ID, CONF_NAME, CONF_TYPE
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.data_entry_flow import FlowHandler
|
||||
from homeassistant.data_entry_flow import FlowResult
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.importlib import async_import_module
|
||||
from homeassistant.util import dt as dt_util
|
||||
from homeassistant.util.decorator import Registry
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
|
||||
from ..auth_store import AuthStore
|
||||
from ..const import MFA_SESSION_EXPIRATION
|
||||
from ..models import (
|
||||
AuthFlowContext,
|
||||
AuthFlowResult,
|
||||
Credentials,
|
||||
RefreshToken,
|
||||
User,
|
||||
UserMeta,
|
||||
)
|
||||
from ..models import Credentials, RefreshToken, User, UserMeta
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
DATA_REQS: HassKey[set[str]] = HassKey("auth_prov_reqs_processed")
|
||||
DATA_REQS = "auth_prov_reqs_processed"
|
||||
|
||||
AUTH_PROVIDERS: Registry[str, type[AuthProvider]] = Registry()
|
||||
|
||||
|
@ -105,7 +96,7 @@ class AuthProvider:
|
|||
|
||||
# Implement by extending class
|
||||
|
||||
async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow:
|
||||
async def async_login_flow(self, context: dict[str, Any] | None) -> LoginFlow:
|
||||
"""Return the data flow for logging in with auth provider.
|
||||
|
||||
Auth provider should extend LoginFlow and return an instance.
|
||||
|
@ -166,9 +157,7 @@ async def load_auth_provider_module(
|
|||
) -> types.ModuleType:
|
||||
"""Load an auth provider."""
|
||||
try:
|
||||
module = await async_import_module(
|
||||
hass, f"homeassistant.auth.providers.{provider}"
|
||||
)
|
||||
module = importlib.import_module(f"homeassistant.auth.providers.{provider}")
|
||||
except ImportError as err:
|
||||
_LOGGER.error("Unable to load auth provider %s: %s", provider, err)
|
||||
raise HomeAssistantError(
|
||||
|
@ -192,11 +181,9 @@ async def load_auth_provider_module(
|
|||
return module
|
||||
|
||||
|
||||
class LoginFlow(FlowHandler[AuthFlowContext, AuthFlowResult, tuple[str, str]]):
|
||||
class LoginFlow(data_entry_flow.FlowHandler):
|
||||
"""Handler for the login flow."""
|
||||
|
||||
_flow_result = AuthFlowResult
|
||||
|
||||
def __init__(self, auth_provider: AuthProvider) -> None:
|
||||
"""Initialize the login flow."""
|
||||
self._auth_provider = auth_provider
|
||||
|
@ -210,7 +197,7 @@ class LoginFlow(FlowHandler[AuthFlowContext, AuthFlowResult, tuple[str, str]]):
|
|||
|
||||
async def async_step_init(
|
||||
self, user_input: dict[str, str] | None = None
|
||||
) -> AuthFlowResult:
|
||||
) -> FlowResult:
|
||||
"""Handle the first step of login flow.
|
||||
|
||||
Return self.async_show_form(step_id='init') if user_input is None.
|
||||
|
@ -220,7 +207,7 @@ class LoginFlow(FlowHandler[AuthFlowContext, AuthFlowResult, tuple[str, str]]):
|
|||
|
||||
async def async_step_select_mfa_module(
|
||||
self, user_input: dict[str, str] | None = None
|
||||
) -> AuthFlowResult:
|
||||
) -> FlowResult:
|
||||
"""Handle the step of select mfa module."""
|
||||
errors = {}
|
||||
|
||||
|
@ -245,7 +232,7 @@ class LoginFlow(FlowHandler[AuthFlowContext, AuthFlowResult, tuple[str, str]]):
|
|||
|
||||
async def async_step_mfa(
|
||||
self, user_input: dict[str, str] | None = None
|
||||
) -> AuthFlowResult:
|
||||
) -> FlowResult:
|
||||
"""Handle the step of mfa validation."""
|
||||
assert self.credential
|
||||
assert self.user
|
||||
|
@ -295,6 +282,6 @@ class LoginFlow(FlowHandler[AuthFlowContext, AuthFlowResult, tuple[str, str]]):
|
|||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_finish(self, flow_result: Any) -> AuthFlowResult:
|
||||
async def async_finish(self, flow_result: Any) -> FlowResult:
|
||||
"""Handle the pass of login flow."""
|
||||
return self.async_create_entry(data=flow_result)
|
||||
|
|
|
@ -1,5 +1,4 @@
|
|||
"""Auth provider that validates credentials via an external command."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
|
@ -11,9 +10,10 @@ from typing import Any, cast
|
|||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import CONF_COMMAND
|
||||
from homeassistant.data_entry_flow import FlowResult
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
|
||||
from ..models import AuthFlowContext, AuthFlowResult, Credentials, UserMeta
|
||||
from ..models import Credentials, UserMeta
|
||||
from . import AUTH_PROVIDER_SCHEMA, AUTH_PROVIDERS, AuthProvider, LoginFlow
|
||||
|
||||
CONF_ARGS = "args"
|
||||
|
@ -59,7 +59,7 @@ class CommandLineAuthProvider(AuthProvider):
|
|||
super().__init__(*args, **kwargs)
|
||||
self._user_meta: dict[str, dict[str, Any]] = {}
|
||||
|
||||
async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow:
|
||||
async def async_login_flow(self, context: dict[str, Any] | None) -> LoginFlow:
|
||||
"""Return a flow to login."""
|
||||
return CommandLineLoginFlow(self)
|
||||
|
||||
|
@ -138,7 +138,7 @@ class CommandLineLoginFlow(LoginFlow):
|
|||
|
||||
async def async_step_init(
|
||||
self, user_input: dict[str, str] | None = None
|
||||
) -> AuthFlowResult:
|
||||
) -> FlowResult:
|
||||
"""Handle the step of the form."""
|
||||
errors = {}
|
||||
|
||||
|
|
|
@ -1,5 +1,4 @@
|
|||
"""Home Assistant auth provider."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
|
@ -13,11 +12,11 @@ import voluptuous as vol
|
|||
|
||||
from homeassistant.const import CONF_ID
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.data_entry_flow import FlowResult
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import issue_registry as ir
|
||||
from homeassistant.helpers.storage import Store
|
||||
|
||||
from ..models import AuthFlowContext, AuthFlowResult, Credentials, UserMeta
|
||||
from ..models import Credentials, UserMeta
|
||||
from . import AUTH_PROVIDER_SCHEMA, AUTH_PROVIDERS, AuthProvider, LoginFlow
|
||||
|
||||
STORAGE_VERSION = 1
|
||||
|
@ -55,27 +54,6 @@ class InvalidUser(HomeAssistantError):
|
|||
Will not be raised when validating authentication.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
*args: object,
|
||||
translation_key: str | None = None,
|
||||
translation_placeholders: dict[str, str] | None = None,
|
||||
) -> None:
|
||||
"""Initialize exception."""
|
||||
super().__init__(
|
||||
*args,
|
||||
translation_domain="auth",
|
||||
translation_key=translation_key,
|
||||
translation_placeholders=translation_placeholders,
|
||||
)
|
||||
|
||||
|
||||
class InvalidUsername(InvalidUser):
|
||||
"""Raised when invalid username is specified.
|
||||
|
||||
Will not be raised when validating authentication.
|
||||
"""
|
||||
|
||||
|
||||
class Data:
|
||||
"""Hold the user data."""
|
||||
|
@ -89,15 +67,13 @@ class Data:
|
|||
self._data: dict[str, list[dict[str, str]]] | None = None
|
||||
# Legacy mode will allow usernames to start/end with whitespace
|
||||
# and will compare usernames case-insensitive.
|
||||
# Deprecated in June 2019 and will be removed in 2026.7
|
||||
# Remove in 2020 or when we launch 1.0.
|
||||
self.is_legacy = False
|
||||
|
||||
@callback
|
||||
def normalize_username(
|
||||
self, username: str, *, force_normalize: bool = False
|
||||
) -> str:
|
||||
def normalize_username(self, username: str) -> str:
|
||||
"""Normalize a username based on the mode."""
|
||||
if self.is_legacy and not force_normalize:
|
||||
if self.is_legacy:
|
||||
return username
|
||||
|
||||
return username.strip().casefold()
|
||||
|
@ -107,49 +83,44 @@ class Data:
|
|||
if (data := await self._store.async_load()) is None:
|
||||
data = cast(dict[str, list[dict[str, str]]], {"users": []})
|
||||
|
||||
self._async_check_for_not_normalized_usernames(data)
|
||||
self._data = data
|
||||
|
||||
@callback
|
||||
def _async_check_for_not_normalized_usernames(
|
||||
self, data: dict[str, list[dict[str, str]]]
|
||||
) -> None:
|
||||
not_normalized_usernames: set[str] = set()
|
||||
seen: set[str] = set()
|
||||
|
||||
for user in data["users"]:
|
||||
username = user["username"]
|
||||
|
||||
if self.normalize_username(username, force_normalize=True) != username:
|
||||
# check if we have duplicates
|
||||
if (folded := username.casefold()) in seen:
|
||||
self.is_legacy = True
|
||||
|
||||
logging.getLogger(__name__).warning(
|
||||
(
|
||||
"Home Assistant auth provider is running in legacy mode "
|
||||
"because we detected usernames that are normalized (lowercase and without spaces)."
|
||||
" Please change the username: '%s'."
|
||||
"because we detected usernames that are case-insensitive"
|
||||
"equivalent. Please change the username: '%s'."
|
||||
),
|
||||
username,
|
||||
)
|
||||
not_normalized_usernames.add(username)
|
||||
|
||||
if not_normalized_usernames:
|
||||
self.is_legacy = True
|
||||
ir.async_create_issue(
|
||||
self.hass,
|
||||
"auth",
|
||||
"homeassistant_provider_not_normalized_usernames",
|
||||
breaks_in_ha_version="2026.7.0",
|
||||
is_fixable=False,
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
translation_key="homeassistant_provider_not_normalized_usernames",
|
||||
translation_placeholders={
|
||||
"usernames": f'- "{'"\n- "'.join(sorted(not_normalized_usernames))}"'
|
||||
},
|
||||
learn_more_url="homeassistant://config/users",
|
||||
)
|
||||
else:
|
||||
self.is_legacy = False
|
||||
ir.async_delete_issue(
|
||||
self.hass, "auth", "homeassistant_provider_not_normalized_usernames"
|
||||
)
|
||||
break
|
||||
|
||||
seen.add(folded)
|
||||
|
||||
# check if we have unstripped usernames
|
||||
if username != username.strip():
|
||||
self.is_legacy = True
|
||||
|
||||
logging.getLogger(__name__).warning(
|
||||
(
|
||||
"Home Assistant auth provider is running in legacy mode "
|
||||
"because we detected usernames that start or end in a "
|
||||
"space. Please change the username: '%s'."
|
||||
),
|
||||
username,
|
||||
)
|
||||
|
||||
break
|
||||
|
||||
self._data = data
|
||||
|
||||
@property
|
||||
def users(self) -> list[dict[str, str]]:
|
||||
|
@ -191,11 +162,13 @@ class Data:
|
|||
return hashed
|
||||
|
||||
def add_auth(self, username: str, password: str) -> None:
|
||||
"""Add a new authenticated user/pass.
|
||||
"""Add a new authenticated user/pass."""
|
||||
username = self.normalize_username(username)
|
||||
|
||||
Raises InvalidUsername if the new username is invalid.
|
||||
"""
|
||||
self._validate_new_username(username)
|
||||
if any(
|
||||
self.normalize_username(user["username"]) == username for user in self.users
|
||||
):
|
||||
raise InvalidUser
|
||||
|
||||
self.users.append(
|
||||
{
|
||||
|
@ -216,7 +189,7 @@ class Data:
|
|||
break
|
||||
|
||||
if index is None:
|
||||
raise InvalidUser(translation_key="user_not_found")
|
||||
raise InvalidUser
|
||||
|
||||
self.users.pop(index)
|
||||
|
||||
|
@ -232,50 +205,7 @@ class Data:
|
|||
user["password"] = self.hash_password(new_password, True).decode()
|
||||
break
|
||||
else:
|
||||
raise InvalidUser(translation_key="user_not_found")
|
||||
|
||||
@callback
|
||||
def _validate_new_username(self, new_username: str) -> None:
|
||||
"""Validate that username is normalized and unique.
|
||||
|
||||
Raises InvalidUsername if the new username is invalid.
|
||||
"""
|
||||
normalized_username = self.normalize_username(
|
||||
new_username, force_normalize=True
|
||||
)
|
||||
if normalized_username != new_username:
|
||||
raise InvalidUsername(
|
||||
translation_key="username_not_normalized",
|
||||
translation_placeholders={"new_username": new_username},
|
||||
)
|
||||
|
||||
if any(
|
||||
self.normalize_username(user["username"]) == normalized_username
|
||||
for user in self.users
|
||||
):
|
||||
raise InvalidUsername(
|
||||
translation_key="username_already_exists",
|
||||
translation_placeholders={"username": new_username},
|
||||
)
|
||||
|
||||
@callback
|
||||
def change_username(self, username: str, new_username: str) -> None:
|
||||
"""Update the username.
|
||||
|
||||
Raises InvalidUser if user cannot be found.
|
||||
Raises InvalidUsername if the new username is invalid.
|
||||
"""
|
||||
username = self.normalize_username(username)
|
||||
self._validate_new_username(new_username)
|
||||
|
||||
for user in self.users:
|
||||
if self.normalize_username(user["username"]) == username:
|
||||
user["username"] = new_username
|
||||
assert self._data is not None
|
||||
self._async_check_for_not_normalized_usernames(self._data)
|
||||
break
|
||||
else:
|
||||
raise InvalidUser(translation_key="user_not_found")
|
||||
raise InvalidUser
|
||||
|
||||
async def async_save(self) -> None:
|
||||
"""Save data."""
|
||||
|
@ -305,7 +235,7 @@ class HassAuthProvider(AuthProvider):
|
|||
await data.async_load()
|
||||
self.data = data
|
||||
|
||||
async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow:
|
||||
async def async_login_flow(self, context: dict[str, Any] | None) -> LoginFlow:
|
||||
"""Return a flow to login."""
|
||||
return HassLoginFlow(self)
|
||||
|
||||
|
@ -348,20 +278,6 @@ class HassAuthProvider(AuthProvider):
|
|||
)
|
||||
await self.data.async_save()
|
||||
|
||||
async def async_change_username(
|
||||
self, credential: Credentials, new_username: str
|
||||
) -> None:
|
||||
"""Validate new username and change it including updating credentials object."""
|
||||
if self.data is None:
|
||||
await self.async_initialize()
|
||||
assert self.data is not None
|
||||
|
||||
self.data.change_username(credential.data["username"], new_username)
|
||||
self.hass.auth.async_update_user_credentials_data(
|
||||
credential, {**credential.data, "username": new_username}
|
||||
)
|
||||
await self.data.async_save()
|
||||
|
||||
async def async_get_or_create_credentials(
|
||||
self, flow_result: Mapping[str, str]
|
||||
) -> Credentials:
|
||||
|
@ -405,7 +321,7 @@ class HassLoginFlow(LoginFlow):
|
|||
|
||||
async def async_step_init(
|
||||
self, user_input: dict[str, str] | None = None
|
||||
) -> AuthFlowResult:
|
||||
) -> FlowResult:
|
||||
"""Handle the step of the form."""
|
||||
errors = {}
|
||||
|
||||
|
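
The provider hunks above detect legacy usernames by comparing each stored username with its stripped, casefolded form and raising a repair issue when they differ. A small sketch of that check in isolation; the function names are illustrative, not the provider's API:

def normalized(username: str) -> str:
    """Strip surrounding whitespace and casefold, as the non-legacy path does."""
    return username.strip().casefold()

def find_not_normalized(usernames: list[str]) -> set[str]:
    """Return the usernames that would trip the legacy-mode detection."""
    return {name for name in usernames if normalized(name) != name}

# find_not_normalized(["paulus", " Frenck ", "ADMIN"]) -> {" Frenck ", "ADMIN"}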
|
|
@@ -1,17 +1,17 @@
"""Example auth provider."""

from __future__ import annotations

from collections.abc import Mapping
import hmac
from typing import cast
from typing import Any, cast

import voluptuous as vol

from homeassistant.core import callback
from homeassistant.data_entry_flow import FlowResult
from homeassistant.exceptions import HomeAssistantError

from ..models import AuthFlowContext, AuthFlowResult, Credentials, UserMeta
from ..models import Credentials, UserMeta
from . import AUTH_PROVIDER_SCHEMA, AUTH_PROVIDERS, AuthProvider, LoginFlow

USER_SCHEMA = vol.Schema(

@@ -36,7 +36,7 @@ class InvalidAuthError(HomeAssistantError):
class ExampleAuthProvider(AuthProvider):
    """Example auth provider based on hardcoded usernames and passwords."""

    async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow:
    async def async_login_flow(self, context: dict[str, Any] | None) -> LoginFlow:
        """Return a flow to login."""
        return ExampleLoginFlow(self)

@@ -98,7 +98,7 @@ class ExampleLoginFlow(LoginFlow):

    async def async_step_init(
        self, user_input: dict[str, str] | None = None
    ) -> AuthFlowResult:
    ) -> FlowResult:
        """Handle the step of the form."""
        errors = None
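
The login flows in these providers all follow the same show-form-until-valid loop. A rough stand-alone sketch of that control flow, with made-up FormResult/Finished stand-ins instead of the real FlowResult types:

# Sketch only: FormResult and Finished are illustrative, not the LoginFlow API.
from dataclasses import dataclass, field

@dataclass
class FormResult:
    step_id: str
    errors: dict[str, str] = field(default_factory=dict)

@dataclass
class Finished:
    data: dict[str, str]

def step_init(user_input: dict[str, str] | None, valid_password: str):
    errors: dict[str, str] = {}
    if user_input is not None:
        if user_input.get("password") == valid_password:
            return Finished(data={})
        errors["base"] = "invalid_auth"
    # No (valid) input yet: render the form again, carrying any errors.
    return FormResult(step_id="init", errors=errors)
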
123 homeassistant/auth/providers/legacy_api_password.py Normal file

@@ -0,0 +1,123 @@
"""Support Legacy API password auth provider.
|
||||
|
||||
It will be removed when auth system production ready
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Mapping
|
||||
import hmac
|
||||
from typing import Any, cast
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.core import async_get_hass, callback
|
||||
from homeassistant.data_entry_flow import FlowResult
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
|
||||
|
||||
from ..models import Credentials, UserMeta
|
||||
from . import AUTH_PROVIDER_SCHEMA, AUTH_PROVIDERS, AuthProvider, LoginFlow
|
||||
|
||||
AUTH_PROVIDER_TYPE = "legacy_api_password"
|
||||
CONF_API_PASSWORD = "api_password"
|
||||
|
||||
_CONFIG_SCHEMA = AUTH_PROVIDER_SCHEMA.extend(
|
||||
{vol.Required(CONF_API_PASSWORD): cv.string}, extra=vol.PREVENT_EXTRA
|
||||
)
|
||||
|
||||
|
||||
def _create_repair_and_validate(config: dict[str, Any]) -> dict[str, Any]:
|
||||
async_create_issue(
|
||||
async_get_hass(),
|
||||
"auth",
|
||||
"deprecated_legacy_api_password",
|
||||
breaks_in_ha_version="2024.6.0",
|
||||
is_fixable=False,
|
||||
severity=IssueSeverity.WARNING,
|
||||
translation_key="deprecated_legacy_api_password",
|
||||
)
|
||||
|
||||
return _CONFIG_SCHEMA(config) # type: ignore[no-any-return]
|
||||
|
||||
|
||||
CONFIG_SCHEMA = _create_repair_and_validate
|
||||
|
||||
|
||||
LEGACY_USER_NAME = "Legacy API password user"
|
||||
|
||||
|
||||
class InvalidAuthError(HomeAssistantError):
|
||||
"""Raised when submitting invalid authentication."""
|
||||
|
||||
|
||||
@AUTH_PROVIDERS.register(AUTH_PROVIDER_TYPE)
|
||||
class LegacyApiPasswordAuthProvider(AuthProvider):
|
||||
"""An auth provider support legacy api_password."""
|
||||
|
||||
DEFAULT_TITLE = "Legacy API Password"
|
||||
|
||||
@property
|
||||
def api_password(self) -> str:
|
||||
"""Return api_password."""
|
||||
return str(self.config[CONF_API_PASSWORD])
|
||||
|
||||
async def async_login_flow(self, context: dict[str, Any] | None) -> LoginFlow:
|
||||
"""Return a flow to login."""
|
||||
return LegacyLoginFlow(self)
|
||||
|
||||
@callback
|
||||
def async_validate_login(self, password: str) -> None:
|
||||
"""Validate password."""
|
||||
api_password = str(self.config[CONF_API_PASSWORD])
|
||||
|
||||
if not hmac.compare_digest(
|
||||
api_password.encode("utf-8"), password.encode("utf-8")
|
||||
):
|
||||
raise InvalidAuthError
|
||||
|
||||
async def async_get_or_create_credentials(
|
||||
self, flow_result: Mapping[str, str]
|
||||
) -> Credentials:
|
||||
"""Return credentials for this login."""
|
||||
credentials = await self.async_credentials()
|
||||
if credentials:
|
||||
return credentials[0]
|
||||
|
||||
return self.async_create_credentials({})
|
||||
|
||||
async def async_user_meta_for_credentials(
|
||||
self, credentials: Credentials
|
||||
) -> UserMeta:
|
||||
"""Return info for the user.
|
||||
|
||||
Will be used to populate info when creating a new user.
|
||||
"""
|
||||
return UserMeta(name=LEGACY_USER_NAME, is_active=True)
|
||||
|
||||
|
||||
class LegacyLoginFlow(LoginFlow):
|
||||
"""Handler for the login flow."""
|
||||
|
||||
async def async_step_init(
|
||||
self, user_input: dict[str, str] | None = None
|
||||
) -> FlowResult:
|
||||
"""Handle the step of the form."""
|
||||
errors = {}
|
||||
|
||||
if user_input is not None:
|
||||
try:
|
||||
cast(
|
||||
LegacyApiPasswordAuthProvider, self._auth_provider
|
||||
).async_validate_login(user_input["password"])
|
||||
except InvalidAuthError:
|
||||
errors["base"] = "invalid_auth"
|
||||
|
||||
if not errors:
|
||||
return await self.async_finish({})
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="init",
|
||||
data_schema=vol.Schema({vol.Required("password"): str}),
|
||||
errors=errors,
|
||||
)
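
The password check in async_validate_login above is plain stdlib hmac.compare_digest; a minimal sketch of the same constant-time comparison:

import hmac

def password_matches(configured: str, supplied: str) -> bool:
    # compare_digest is constant-time, unlike a plain == on strings.
    return hmac.compare_digest(configured.encode("utf-8"), supplied.encode("utf-8"))

assert password_matches("s3cret", "s3cret")
assert not password_matches("s3cret", "guess")
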
@@ -3,7 +3,6 @@
It shows list of users if access from trusted network.
Abort login flow if not access from trusted network.
"""

from __future__ import annotations

from collections.abc import Mapping

@@ -20,22 +19,17 @@ from typing import Any, cast
import voluptuous as vol

from homeassistant.core import callback
from homeassistant.data_entry_flow import FlowResult
from homeassistant.exceptions import HomeAssistantError
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.network import is_cloud_connection

from .. import InvalidAuthError
from ..models import (
    AuthFlowContext,
    AuthFlowResult,
    Credentials,
    RefreshToken,
    UserMeta,
)
from ..models import Credentials, RefreshToken, UserMeta
from . import AUTH_PROVIDER_SCHEMA, AUTH_PROVIDERS, AuthProvider, LoginFlow

type IPAddress = IPv4Address | IPv6Address
type IPNetwork = IPv4Network | IPv6Network
IPAddress = IPv4Address | IPv6Address
IPNetwork = IPv4Network | IPv6Network

CONF_TRUSTED_NETWORKS = "trusted_networks"
CONF_TRUSTED_USERS = "trusted_users"

@@ -104,7 +98,7 @@ class TrustedNetworksAuthProvider(AuthProvider):
        """Trusted Networks auth provider does not support MFA."""
        return False

    async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow:
    async def async_login_flow(self, context: dict[str, Any] | None) -> LoginFlow:
        """Return a flow to login."""
        assert context is not None
        ip_addr = cast(IPAddress, context.get("ip_address"))

@@ -232,7 +226,7 @@ class TrustedNetworksLoginFlow(LoginFlow):

    async def async_step_init(
        self, user_input: dict[str, str] | None = None
    ) -> AuthFlowResult:
    ) -> FlowResult:
        """Handle the step of the form."""
        try:
            cast(
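
The IPAddress/IPNetwork aliases above are built from the stdlib ipaddress classes, and the trusted-network test reduces to network membership. A small sketch (the helper name is illustrative, not the provider's API):

from ipaddress import IPv4Address, IPv4Network, IPv6Address, IPv6Network, ip_address

IPAddress = IPv4Address | IPv6Address
IPNetwork = IPv4Network | IPv6Network

def is_trusted(addr: IPAddress, trusted_networks: list[IPNetwork]) -> bool:
    # An address is trusted when it falls inside any configured network.
    return any(addr in network for network in trusted_networks)

networks: list[IPNetwork] = [IPv4Network("192.168.1.0/24")]
assert is_trusted(ip_address("192.168.1.42"), networks)
assert not is_trusted(ip_address("203.0.113.7"), networks)
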
@@ -6,24 +6,10 @@ Since we have dropped support for Python 3.10, we can remove this backport.
This file is kept for now to avoid breaking custom components that might
import it.
"""

from __future__ import annotations

from enum import StrEnum as _StrEnum
from functools import partial
from enum import StrEnum

from homeassistant.helpers.deprecation import (
    DeprecatedAlias,
    all_with_deprecated_constants,
    check_if_deprecated_constant,
    dir_with_deprecated_constants,
)

# StrEnum deprecated as of 2024.5 use enum.StrEnum instead.
_DEPRECATED_StrEnum = DeprecatedAlias(_StrEnum, "enum.StrEnum", "2025.5")

__getattr__ = partial(check_if_deprecated_constant, module_globals=globals())
__dir__ = partial(
    dir_with_deprecated_constants, module_globals_keys=[*globals().keys()]
)
__all__ = all_with_deprecated_constants(globals())
__all__ = [
    "StrEnum",
]
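
The deprecation shim above relies on Home Assistant's DeprecatedAlias plus a module-level __getattr__. A stand-alone sketch of the same idea using only the standard library (the dictionary layout here is illustrative, not the helper's internals):

# Sketch of a module-level deprecation shim, mirroring _DEPRECATED_StrEnum above.
import warnings
from enum import StrEnum as _StrEnum

_DEPRECATED = {"StrEnum": (_StrEnum, "enum.StrEnum", "2025.5")}

def __getattr__(name: str):
    # PEP 562: called for module attributes that are not found normally.
    if name in _DEPRECATED:
        target, replacement, remove_in = _DEPRECATED[name]
        warnings.warn(
            f"{name} is deprecated, use {replacement} instead; "
            f"it will be removed in {remove_in}",
            DeprecationWarning,
            stacklevel=2,
        )
        return target
    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
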
|
@@ -1,31 +1,81 @@
|
|||
"""Functools backports from standard lib.
|
||||
"""Functools backports from standard lib."""
|
||||
|
||||
This file contained the backport of the cached_property implementation of Python 3.12.
|
||||
|
||||
Since we have dropped support for Python 3.11, we can remove this backport.
|
||||
This file is kept for now to avoid breaking custom components that might
|
||||
import it.
|
||||
"""
|
||||
# This file contains parts of Python's module wrapper
|
||||
# for the _functools C module
|
||||
# to allow utilities written in Python to be added
|
||||
# to the functools module.
|
||||
# Written by Nick Coghlan <ncoghlan at gmail.com>,
|
||||
# Raymond Hettinger <python at rcn.com>,
|
||||
# and Łukasz Langa <lukasz at langa.pl>.
|
||||
# Copyright © 2001-2023 Python Software Foundation; All Rights Reserved
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
# pylint: disable-next=hass-deprecated-import
|
||||
from functools import cached_property as _cached_property, partial
|
||||
from collections.abc import Callable
|
||||
from types import GenericAlias
|
||||
from typing import Any, Generic, Self, TypeVar, overload
|
||||
|
||||
from homeassistant.helpers.deprecation import (
|
||||
DeprecatedAlias,
|
||||
all_with_deprecated_constants,
|
||||
check_if_deprecated_constant,
|
||||
dir_with_deprecated_constants,
|
||||
)
|
||||
_T = TypeVar("_T")
|
||||
|
||||
# cached_property deprecated as of 2024.5 use functools.cached_property instead.
|
||||
_DEPRECATED_cached_property = DeprecatedAlias(
|
||||
_cached_property, "functools.cached_property", "2025.5"
|
||||
)
|
||||
|
||||
__getattr__ = partial(check_if_deprecated_constant, module_globals=globals())
|
||||
__dir__ = partial(
|
||||
dir_with_deprecated_constants, module_globals_keys=[*globals().keys()]
|
||||
)
|
||||
__all__ = all_with_deprecated_constants(globals())
|
||||
class cached_property(Generic[_T]):
|
||||
"""Backport of Python 3.12's cached_property.
|
||||
|
||||
Includes https://github.com/python/cpython/pull/101890/files
|
||||
"""
|
||||
|
||||
def __init__(self, func: Callable[[Any], _T]) -> None:
|
||||
"""Initialize."""
|
||||
self.func: Callable[[Any], _T] = func
|
||||
self.attrname: str | None = None
|
||||
self.__doc__ = func.__doc__
|
||||
|
||||
def __set_name__(self, owner: type[Any], name: str) -> None:
|
||||
"""Set name."""
|
||||
if self.attrname is None:
|
||||
self.attrname = name
|
||||
elif name != self.attrname:
|
||||
raise TypeError(
|
||||
"Cannot assign the same cached_property to two different names "
|
||||
f"({self.attrname!r} and {name!r})."
|
||||
)
|
||||
|
||||
@overload
|
||||
def __get__(self, instance: None, owner: type[Any] | None = None) -> Self:
|
||||
...
|
||||
|
||||
@overload
|
||||
def __get__(self, instance: Any, owner: type[Any] | None = None) -> _T:
|
||||
...
|
||||
|
||||
def __get__(
|
||||
self, instance: Any | None, owner: type[Any] | None = None
|
||||
) -> _T | Self:
|
||||
"""Get."""
|
||||
if instance is None:
|
||||
return self
|
||||
if self.attrname is None:
|
||||
raise TypeError(
|
||||
"Cannot use cached_property instance without calling __set_name__ on it."
|
||||
)
|
||||
try:
|
||||
cache = instance.__dict__
|
||||
# not all objects have __dict__ (e.g. class defines slots)
|
||||
except AttributeError:
|
||||
msg = (
|
||||
f"No '__dict__' attribute on {type(instance).__name__!r} "
|
||||
f"instance to cache {self.attrname!r} property."
|
||||
)
|
||||
raise TypeError(msg) from None
|
||||
val = self.func(instance)
|
||||
try:
|
||||
cache[self.attrname] = val
|
||||
except TypeError:
|
||||
msg = (
|
||||
f"The '__dict__' attribute on {type(instance).__name__!r} instance "
|
||||
f"does not support item assignment for caching {self.attrname!r} property."
|
||||
)
|
||||
raise TypeError(msg) from None
|
||||
return val
|
||||
|
||||
__class_getitem__ = classmethod(GenericAlias) # type: ignore[var-annotated]
|
||||
|
|
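
A short usage sketch of the cached_property descriptor shown above: the wrapped method runs once per instance and the result is then served from the instance __dict__ (the stdlib functools.cached_property used here behaves the same way as the backport).

from functools import cached_property

class Sensor:
    def __init__(self, raw: list[float]) -> None:
        self.raw = raw

    @cached_property
    def average(self) -> float:
        print("computing...")  # runs only on first access
        return sum(self.raw) / len(self.raw)

s = Sensor([1.0, 2.0, 3.0])
assert s.average == 2.0  # prints "computing..."
assert s.average == 2.0  # second access is served from s.__dict__
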
|
@@ -1,126 +0,0 @@
|
|||
"""Home Assistant module to handle restoring backups."""
|
||||
|
||||
from dataclasses import dataclass
|
||||
import json
|
||||
import logging
|
||||
from pathlib import Path
|
||||
import shutil
|
||||
import sys
|
||||
from tempfile import TemporaryDirectory
|
||||
|
||||
from awesomeversion import AwesomeVersion
|
||||
import securetar
|
||||
|
||||
from .const import __version__ as HA_VERSION
|
||||
|
||||
RESTORE_BACKUP_FILE = ".HA_RESTORE"
|
||||
KEEP_PATHS = ("backups",)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass
|
||||
class RestoreBackupFileContent:
|
||||
"""Definition for restore backup file content."""
|
||||
|
||||
backup_file_path: Path
|
||||
|
||||
|
||||
def restore_backup_file_content(config_dir: Path) -> RestoreBackupFileContent | None:
|
||||
"""Return the contents of the restore backup file."""
|
||||
instruction_path = config_dir.joinpath(RESTORE_BACKUP_FILE)
|
||||
try:
|
||||
instruction_content = json.loads(instruction_path.read_text(encoding="utf-8"))
|
||||
return RestoreBackupFileContent(
|
||||
backup_file_path=Path(instruction_content["path"])
|
||||
)
|
||||
except (FileNotFoundError, json.JSONDecodeError):
|
||||
return None
|
||||
|
||||
|
||||
def _clear_configuration_directory(config_dir: Path) -> None:
|
||||
"""Delete all files and directories in the config directory except for the backups directory."""
|
||||
keep_paths = [config_dir.joinpath(path) for path in KEEP_PATHS]
|
||||
config_contents = sorted(
|
||||
[entry for entry in config_dir.iterdir() if entry not in keep_paths]
|
||||
)
|
||||
|
||||
for entry in config_contents:
|
||||
entrypath = config_dir.joinpath(entry)
|
||||
|
||||
if entrypath.is_file():
|
||||
entrypath.unlink()
|
||||
elif entrypath.is_dir():
|
||||
shutil.rmtree(entrypath)
|
||||
|
||||
|
||||
def _extract_backup(config_dir: Path, backup_file_path: Path) -> None:
|
||||
"""Extract the backup file to the config directory."""
|
||||
with (
|
||||
TemporaryDirectory() as tempdir,
|
||||
securetar.SecureTarFile(
|
||||
backup_file_path,
|
||||
gzip=False,
|
||||
mode="r",
|
||||
) as ostf,
|
||||
):
|
||||
ostf.extractall(
|
||||
path=Path(tempdir, "extracted"),
|
||||
members=securetar.secure_path(ostf),
|
||||
filter="fully_trusted",
|
||||
)
|
||||
backup_meta_file = Path(tempdir, "extracted", "backup.json")
|
||||
backup_meta = json.loads(backup_meta_file.read_text(encoding="utf8"))
|
||||
|
||||
if (
|
||||
backup_meta_version := AwesomeVersion(
|
||||
backup_meta["homeassistant"]["version"]
|
||||
)
|
||||
) > HA_VERSION:
|
||||
raise ValueError(
|
||||
f"You need at least Home Assistant version {backup_meta_version} to restore this backup"
|
||||
)
|
||||
|
||||
with securetar.SecureTarFile(
|
||||
Path(
|
||||
tempdir,
|
||||
"extracted",
|
||||
f"homeassistant.tar{'.gz' if backup_meta["compressed"] else ''}",
|
||||
),
|
||||
gzip=backup_meta["compressed"],
|
||||
mode="r",
|
||||
) as istf:
|
||||
for member in istf.getmembers():
|
||||
if member.name == "data":
|
||||
continue
|
||||
member.name = member.name.replace("data/", "")
|
||||
_clear_configuration_directory(config_dir)
|
||||
istf.extractall(
|
||||
path=config_dir,
|
||||
members=[
|
||||
member
|
||||
for member in securetar.secure_path(istf)
|
||||
if member.name != "data"
|
||||
],
|
||||
filter="fully_trusted",
|
||||
)
|
||||
|
||||
|
||||
def restore_backup(config_dir_path: str) -> bool:
|
||||
"""Restore the backup file if any.
|
||||
|
||||
Returns True if a restore backup file was found and restored, False otherwise.
|
||||
"""
|
||||
config_dir = Path(config_dir_path)
|
||||
if not (restore_content := restore_backup_file_content(config_dir)):
|
||||
return False
|
||||
|
||||
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
|
||||
backup_file_path = restore_content.backup_file_path
|
||||
_LOGGER.info("Restoring %s", backup_file_path)
|
||||
try:
|
||||
_extract_backup(config_dir, backup_file_path)
|
||||
except FileNotFoundError as err:
|
||||
raise ValueError(f"Backup file {backup_file_path} does not exist") from err
|
||||
_LOGGER.info("Restore complete, restarting")
|
||||
return True
|
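
restore_backup_file_content above only needs the .HA_RESTORE file to be a small JSON object with a "path" key pointing at the backup tar. A hedged sketch of producing such a file (the helper name is illustrative; newer versions may add more keys):

import json
from pathlib import Path

def request_restore(config_dir: Path, backup_file: Path) -> None:
    # RESTORE_BACKUP_FILE above is ".HA_RESTORE"; only the "path" key is read here.
    (config_dir / ".HA_RESTORE").write_text(
        json.dumps({"path": str(backup_file)}), encoding="utf-8"
    )

# Example: request_restore(Path("/config"), Path("/config/backups/abc123.tar"))
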
|
@@ -1,252 +1,20 @@
|
|||
"""Block blocking calls being done in asyncio."""
|
||||
|
||||
import builtins
|
||||
from collections.abc import Callable
|
||||
from contextlib import suppress
|
||||
from dataclasses import dataclass
|
||||
import glob
|
||||
from http.client import HTTPConnection
|
||||
import importlib
|
||||
import os
|
||||
from pathlib import Path
|
||||
from ssl import SSLContext
|
||||
import sys
|
||||
import threading
|
||||
import time
|
||||
from typing import Any
|
||||
|
||||
from .helpers.frame import get_current_frame
|
||||
from .util.loop import protect_loop
|
||||
|
||||
_IN_TESTS = "unittest" in sys.modules
|
||||
|
||||
ALLOWED_FILE_PREFIXES = ("/proc",)
|
||||
|
||||
|
||||
def _check_import_call_allowed(mapped_args: dict[str, Any]) -> bool:
|
||||
# If the module is already imported, we can ignore it.
|
||||
return bool((args := mapped_args.get("args")) and args[0] in sys.modules)
|
||||
|
||||
|
||||
def _check_file_allowed(mapped_args: dict[str, Any]) -> bool:
|
||||
# If the file is in /proc we can ignore it.
|
||||
args = mapped_args["args"]
|
||||
path = args[0] if type(args[0]) is str else str(args[0]) # noqa: E721
|
||||
return path.startswith(ALLOWED_FILE_PREFIXES)
|
||||
|
||||
|
||||
def _check_sleep_call_allowed(mapped_args: dict[str, Any]) -> bool:
|
||||
#
|
||||
# Avoid extracting the stack unless we need to since it
|
||||
# will have to access the linecache which can do blocking
|
||||
# I/O and we are trying to avoid blocking calls.
|
||||
#
|
||||
# frame[0] is us
|
||||
# frame[1] is raise_for_blocking_call
|
||||
# frame[2] is protected_loop_func
|
||||
# frame[3] is the offender
|
||||
with suppress(ValueError):
|
||||
return get_current_frame(4).f_code.co_filename.endswith("pydevd.py")
|
||||
return False
|
||||
|
||||
|
||||
@dataclass(slots=True, frozen=True)
|
||||
class BlockingCall:
|
||||
"""Class to hold information about a blocking call."""
|
||||
|
||||
original_func: Callable
|
||||
object: object
|
||||
function: str
|
||||
check_allowed: Callable[[dict[str, Any]], bool] | None
|
||||
strict: bool
|
||||
strict_core: bool
|
||||
skip_for_tests: bool
|
||||
|
||||
|
||||
_BLOCKING_CALLS: tuple[BlockingCall, ...] = (
|
||||
BlockingCall(
|
||||
original_func=HTTPConnection.putrequest,
|
||||
object=HTTPConnection,
|
||||
function="putrequest",
|
||||
check_allowed=None,
|
||||
strict=True,
|
||||
strict_core=True,
|
||||
skip_for_tests=False,
|
||||
),
|
||||
BlockingCall(
|
||||
original_func=time.sleep,
|
||||
object=time,
|
||||
function="sleep",
|
||||
check_allowed=_check_sleep_call_allowed,
|
||||
strict=True,
|
||||
strict_core=True,
|
||||
skip_for_tests=False,
|
||||
),
|
||||
BlockingCall(
|
||||
original_func=glob.glob,
|
||||
object=glob,
|
||||
function="glob",
|
||||
check_allowed=None,
|
||||
strict=False,
|
||||
strict_core=False,
|
||||
skip_for_tests=False,
|
||||
),
|
||||
BlockingCall(
|
||||
original_func=glob.iglob,
|
||||
object=glob,
|
||||
function="iglob",
|
||||
check_allowed=None,
|
||||
strict=False,
|
||||
strict_core=False,
|
||||
skip_for_tests=False,
|
||||
),
|
||||
BlockingCall(
|
||||
original_func=os.walk,
|
||||
object=os,
|
||||
function="walk",
|
||||
check_allowed=None,
|
||||
strict=False,
|
||||
strict_core=False,
|
||||
skip_for_tests=False,
|
||||
),
|
||||
BlockingCall(
|
||||
original_func=os.listdir,
|
||||
object=os,
|
||||
function="listdir",
|
||||
check_allowed=None,
|
||||
strict=False,
|
||||
strict_core=False,
|
||||
skip_for_tests=True,
|
||||
),
|
||||
BlockingCall(
|
||||
original_func=os.scandir,
|
||||
object=os,
|
||||
function="scandir",
|
||||
check_allowed=None,
|
||||
strict=False,
|
||||
strict_core=False,
|
||||
skip_for_tests=True,
|
||||
),
|
||||
BlockingCall(
|
||||
original_func=builtins.open,
|
||||
object=builtins,
|
||||
function="open",
|
||||
check_allowed=_check_file_allowed,
|
||||
strict=False,
|
||||
strict_core=False,
|
||||
skip_for_tests=True,
|
||||
),
|
||||
BlockingCall(
|
||||
original_func=importlib.import_module,
|
||||
object=importlib,
|
||||
function="import_module",
|
||||
check_allowed=_check_import_call_allowed,
|
||||
strict=False,
|
||||
strict_core=False,
|
||||
skip_for_tests=True,
|
||||
),
|
||||
BlockingCall(
|
||||
original_func=SSLContext.load_default_certs,
|
||||
object=SSLContext,
|
||||
function="load_default_certs",
|
||||
check_allowed=None,
|
||||
strict=False,
|
||||
strict_core=False,
|
||||
skip_for_tests=True,
|
||||
),
|
||||
BlockingCall(
|
||||
original_func=SSLContext.load_verify_locations,
|
||||
object=SSLContext,
|
||||
function="load_verify_locations",
|
||||
check_allowed=None,
|
||||
strict=False,
|
||||
strict_core=False,
|
||||
skip_for_tests=True,
|
||||
),
|
||||
BlockingCall(
|
||||
original_func=SSLContext.load_cert_chain,
|
||||
object=SSLContext,
|
||||
function="load_cert_chain",
|
||||
check_allowed=None,
|
||||
strict=False,
|
||||
strict_core=False,
|
||||
skip_for_tests=True,
|
||||
),
|
||||
BlockingCall(
|
||||
original_func=Path.open,
|
||||
object=Path,
|
||||
function="open",
|
||||
check_allowed=_check_file_allowed,
|
||||
strict=False,
|
||||
strict_core=False,
|
||||
skip_for_tests=True,
|
||||
),
|
||||
BlockingCall(
|
||||
original_func=Path.read_text,
|
||||
object=Path,
|
||||
function="read_text",
|
||||
check_allowed=_check_file_allowed,
|
||||
strict=False,
|
||||
strict_core=False,
|
||||
skip_for_tests=True,
|
||||
),
|
||||
BlockingCall(
|
||||
original_func=Path.read_bytes,
|
||||
object=Path,
|
||||
function="read_bytes",
|
||||
check_allowed=_check_file_allowed,
|
||||
strict=False,
|
||||
strict_core=False,
|
||||
skip_for_tests=True,
|
||||
),
|
||||
BlockingCall(
|
||||
original_func=Path.write_text,
|
||||
object=Path,
|
||||
function="write_text",
|
||||
check_allowed=_check_file_allowed,
|
||||
strict=False,
|
||||
strict_core=False,
|
||||
skip_for_tests=True,
|
||||
),
|
||||
BlockingCall(
|
||||
original_func=Path.write_bytes,
|
||||
object=Path,
|
||||
function="write_bytes",
|
||||
check_allowed=_check_file_allowed,
|
||||
strict=False,
|
||||
strict_core=False,
|
||||
skip_for_tests=True,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
@dataclass(slots=True)
|
||||
class BlockedCalls:
|
||||
"""Class to track which calls are blocked."""
|
||||
|
||||
calls: set[BlockingCall]
|
||||
|
||||
|
||||
_BLOCKED_CALLS = BlockedCalls(set())
|
||||
from .util.async_ import protect_loop
|
||||
|
||||
|
||||
def enable() -> None:
|
||||
"""Enable the detection of blocking calls in the event loop."""
|
||||
calls = _BLOCKED_CALLS.calls
|
||||
if calls:
|
||||
raise RuntimeError("Blocking call detection is already enabled")
|
||||
# Prevent urllib3 and requests doing I/O in event loop
|
||||
HTTPConnection.putrequest = protect_loop( # type: ignore[method-assign]
|
||||
HTTPConnection.putrequest
|
||||
)
|
||||
|
||||
loop_thread_id = threading.get_ident()
|
||||
for blocking_call in _BLOCKING_CALLS:
|
||||
if _IN_TESTS and blocking_call.skip_for_tests:
|
||||
continue
|
||||
# Prevent sleeping in event loop. Non-strict since 2022.02
|
||||
time.sleep = protect_loop(time.sleep, strict=False)
|
||||
|
||||
protected_function = protect_loop(
|
||||
blocking_call.original_func,
|
||||
strict=blocking_call.strict,
|
||||
strict_core=blocking_call.strict_core,
|
||||
check_allowed=blocking_call.check_allowed,
|
||||
loop_thread_id=loop_thread_id,
|
||||
)
|
||||
setattr(blocking_call.object, blocking_call.function, protected_function)
|
||||
calls.add(blocking_call)
|
||||
# Currently disabled. pytz doing I/O when getting timezone.
|
||||
# Prevent files being opened inside the event loop
|
||||
# builtins.open = protect_loop(builtins.open)
|
||||
|
|
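
protect_loop (imported from .util.loop above) wraps known-blocking callables so misuse inside the event loop can be detected. A stand-alone sketch of that wrapping idea, not the real implementation:

# Sketch only: warn when a known-blocking function is called from the thread
# that runs the event loop; the real protect_loop supports strict modes and
# per-call allow checks as shown in the table above.
import functools
import threading
import time
import warnings

def guard_blocking(func, loop_thread_id: int):
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        if threading.get_ident() == loop_thread_id:
            warnings.warn(
                f"Blocking call to {func.__name__} inside the event loop",
                RuntimeWarning,
                stacklevel=2,
            )
        return func(*args, **kwargs)
    return wrapper

# Patch at startup from the loop thread, then call sites stay unchanged.
time.sleep = guard_blocking(time.sleep, loop_thread_id=threading.get_ident())
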
|
@@ -1,16 +1,11 @@
|
|||
"""Provide methods to bootstrap a Home Assistant instance."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from collections import defaultdict
|
||||
import contextlib
|
||||
from functools import partial
|
||||
from itertools import chain
|
||||
from datetime import datetime, timedelta
|
||||
import logging
|
||||
from logging.handlers import RotatingFileHandler, TimedRotatingFileHandler
|
||||
import mimetypes
|
||||
from operator import contains, itemgetter
|
||||
import logging.handlers
|
||||
import os
|
||||
import platform
|
||||
import sys
|
||||
|
@@ -18,112 +13,53 @@ import threading
|
|||
from time import monotonic
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
# Import cryptography early since import openssl is not thread-safe
|
||||
# _frozen_importlib._DeadlockError: deadlock detected by _ModuleLock('cryptography.hazmat.backends.openssl.backend')
|
||||
import cryptography.hazmat.backends.openssl.backend # noqa: F401
|
||||
import voluptuous as vol
|
||||
import yarl
|
||||
|
||||
from . import (
|
||||
block_async_io,
|
||||
config as conf_util,
|
||||
config_entries,
|
||||
core,
|
||||
loader,
|
||||
requirements,
|
||||
)
|
||||
|
||||
# Pre-import frontend deps which have no requirements here to avoid
|
||||
# loading them at run time and blocking the event loop. We do this ahead
|
||||
# of time so that we do not have to flag frontend deps with `import_executor`
|
||||
# as it would create a thundering herd of executor jobs trying to import
|
||||
# frontend deps at the same time.
|
||||
from .components import (
|
||||
api as api_pre_import, # noqa: F401
|
||||
auth as auth_pre_import, # noqa: F401
|
||||
config as config_pre_import, # noqa: F401
|
||||
default_config as default_config_pre_import, # noqa: F401
|
||||
device_automation as device_automation_pre_import, # noqa: F401
|
||||
diagnostics as diagnostics_pre_import, # noqa: F401
|
||||
file_upload as file_upload_pre_import, # noqa: F401
|
||||
group as group_pre_import, # noqa: F401
|
||||
history as history_pre_import, # noqa: F401
|
||||
http, # not named pre_import since it has requirements
|
||||
image_upload as image_upload_import, # noqa: F401 - not named pre_import since it has requirements
|
||||
logbook as logbook_pre_import, # noqa: F401
|
||||
lovelace as lovelace_pre_import, # noqa: F401
|
||||
onboarding as onboarding_pre_import, # noqa: F401
|
||||
recorder as recorder_import, # noqa: F401 - not named pre_import since it has requirements
|
||||
repairs as repairs_pre_import, # noqa: F401
|
||||
search as search_pre_import, # noqa: F401
|
||||
sensor as sensor_pre_import, # noqa: F401
|
||||
system_log as system_log_pre_import, # noqa: F401
|
||||
webhook as webhook_pre_import, # noqa: F401
|
||||
websocket_api as websocket_api_pre_import, # noqa: F401
|
||||
)
|
||||
from .components.sensor import recorder as sensor_recorder # noqa: F401
|
||||
from . import config as conf_util, config_entries, core, loader, requirements
|
||||
from .components import http
|
||||
from .const import (
|
||||
BASE_PLATFORMS,
|
||||
FORMAT_DATETIME,
|
||||
KEY_DATA_LOGGING as DATA_LOGGING,
|
||||
REQUIRED_NEXT_PYTHON_HA_RELEASE,
|
||||
REQUIRED_NEXT_PYTHON_VER,
|
||||
SIGNAL_BOOTSTRAP_INTEGRATIONS,
|
||||
)
|
||||
from .core_config import async_process_ha_core_config
|
||||
from .exceptions import HomeAssistantError
|
||||
from .helpers import (
|
||||
area_registry,
|
||||
category_registry,
|
||||
config_validation as cv,
|
||||
device_registry,
|
||||
entity,
|
||||
entity_registry,
|
||||
floor_registry,
|
||||
issue_registry,
|
||||
label_registry,
|
||||
recorder,
|
||||
restore_state,
|
||||
template,
|
||||
translation,
|
||||
)
|
||||
from .helpers.dispatcher import async_dispatcher_send_internal
|
||||
from .helpers.storage import get_internal_store_manager
|
||||
from .helpers.system_info import async_get_system_info, is_official_image
|
||||
from .helpers.dispatcher import async_dispatcher_send
|
||||
from .helpers.typing import ConfigType
|
||||
from .setup import (
|
||||
# _setup_started is marked as protected to make it clear
|
||||
# that it is not part of the public API and should not be used
|
||||
# by integrations. It is only used for internal tracking of
|
||||
# which integrations are being set up.
|
||||
_setup_started,
|
||||
async_get_setup_timings,
|
||||
DATA_SETUP_STARTED,
|
||||
DATA_SETUP_TIME,
|
||||
async_notify_setup_error,
|
||||
async_set_domains_to_be_loaded,
|
||||
async_setup_component,
|
||||
)
|
||||
from .util.async_ import create_eager_task
|
||||
from .util.hass_dict import HassKey
|
||||
from .util import dt as dt_util
|
||||
from .util.logging import async_activate_log_queue_handler
|
||||
from .util.package import async_get_user_site, is_docker_env, is_virtual_env
|
||||
|
||||
with contextlib.suppress(ImportError):
|
||||
# Ensure anyio backend is imported to avoid it being imported in the event loop
|
||||
from anyio._backends import _asyncio # noqa: F401
|
||||
|
||||
from .util.package import async_get_user_site, is_virtual_env
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .runner import RuntimeConfig
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
SETUP_ORDER_SORT_KEY = partial(contains, BASE_PLATFORMS)
|
||||
|
||||
|
||||
ERROR_LOG_FILENAME = "home-assistant.log"
|
||||
|
||||
# hass.data key for logging information.
|
||||
DATA_REGISTRIES_LOADED: HassKey[None] = HassKey("bootstrap_registries_loaded")
|
||||
DATA_LOGGING = "logging"
|
||||
DATA_REGISTRIES_LOADED = "bootstrap_registries_loaded"
|
||||
|
||||
LOG_SLOW_STARTUP_INTERVAL = 60
|
||||
SLOW_STARTUP_CHECK_INTERVAL = 1
|
||||
|
@@ -133,17 +69,11 @@ STAGE_2_TIMEOUT = 300
|
|||
WRAP_UP_TIMEOUT = 300
|
||||
COOLDOWN_TIME = 60
|
||||
|
||||
MAX_LOAD_CONCURRENTLY = 6
|
||||
|
||||
DEBUGGER_INTEGRATIONS = {"debugpy"}
|
||||
|
||||
# Core integrations are unconditionally loaded
|
||||
CORE_INTEGRATIONS = {"homeassistant", "persistent_notification"}
|
||||
|
||||
# Integrations that are loaded right after the core is set up
|
||||
LOGGING_AND_HTTP_DEPS_INTEGRATIONS = {
|
||||
# isal is loaded right away before `http` to ensure if its
|
||||
# enabled, that `isal` is up to date.
|
||||
"isal",
|
||||
LOGGING_INTEGRATIONS = {
|
||||
# Set log levels
|
||||
"logger",
|
||||
# Error logging
|
||||
|
@@ -180,9 +110,7 @@ DEFAULT_INTEGRATIONS = {
|
|||
# These integrations are set up unless recovery mode is activated.
|
||||
#
|
||||
# Integrations providing core functionality:
|
||||
"analytics", # Needed for onboarding
|
||||
"application_credentials",
|
||||
"backup",
|
||||
"frontend",
|
||||
"hardware",
|
||||
"logger",
|
||||
|
@@ -216,82 +144,33 @@ DEFAULT_INTEGRATIONS_SUPERVISOR = {
|
|||
# These integrations are set up if using the Supervisor
|
||||
"hassio",
|
||||
}
|
||||
DEFAULT_INTEGRATIONS_NON_SUPERVISOR = {
|
||||
# These integrations are set up if not using the Supervisor
|
||||
"backup",
|
||||
}
|
||||
CRITICAL_INTEGRATIONS = {
|
||||
# Recovery mode is activated if these integrations fail to set up
|
||||
"frontend",
|
||||
}
|
||||
|
||||
SETUP_ORDER = (
|
||||
# Load logging and http deps as soon as possible
|
||||
("logging, http deps", LOGGING_AND_HTTP_DEPS_INTEGRATIONS),
|
||||
# Setup frontend
|
||||
("frontend", FRONTEND_INTEGRATIONS),
|
||||
# Setup recorder
|
||||
("recorder", RECORDER_INTEGRATIONS),
|
||||
# Start up debuggers. Start these first in case they want to wait.
|
||||
("debugger", DEBUGGER_INTEGRATIONS),
|
||||
)
|
||||
|
||||
#
|
||||
# Storage keys we are likely to load during startup
|
||||
# in order of when we expect to load them.
|
||||
#
|
||||
# If they do not exist they will not be loaded
|
||||
#
|
||||
PRELOAD_STORAGE = [
|
||||
"core.logger",
|
||||
"core.network",
|
||||
"http.auth",
|
||||
"image",
|
||||
"lovelace_dashboards",
|
||||
"lovelace_resources",
|
||||
"core.uuid",
|
||||
"lovelace.map",
|
||||
"bluetooth.passive_update_processor",
|
||||
"bluetooth.remote_scanners",
|
||||
"assist_pipeline.pipelines",
|
||||
"core.analytics",
|
||||
"auth_module.totp",
|
||||
]
|
||||
|
||||
|
||||
async def async_setup_hass(
|
||||
runtime_config: RuntimeConfig,
|
||||
) -> core.HomeAssistant | None:
|
||||
"""Set up Home Assistant."""
|
||||
hass = core.HomeAssistant(runtime_config.config_dir)
|
||||
|
||||
async def create_hass() -> core.HomeAssistant:
|
||||
"""Create the hass object and do basic setup."""
|
||||
hass = core.HomeAssistant(runtime_config.config_dir)
|
||||
loader.async_setup(hass)
|
||||
|
||||
await async_enable_logging(
|
||||
hass,
|
||||
runtime_config.verbose,
|
||||
runtime_config.log_rotate_days,
|
||||
runtime_config.log_file,
|
||||
runtime_config.log_no_color,
|
||||
)
|
||||
|
||||
if runtime_config.debug or hass.loop.get_debug():
|
||||
hass.config.debug = True
|
||||
|
||||
hass.config.safe_mode = runtime_config.safe_mode
|
||||
hass.config.skip_pip = runtime_config.skip_pip
|
||||
hass.config.skip_pip_packages = runtime_config.skip_pip_packages
|
||||
|
||||
return hass
|
||||
|
||||
async def stop_hass(hass: core.HomeAssistant) -> None:
|
||||
"""Stop hass."""
|
||||
# Ask integrations to shut down. It's messy but we can't
|
||||
# do a clean stop without knowing what is broken
|
||||
with contextlib.suppress(TimeoutError):
|
||||
async with hass.timeout.async_timeout(10):
|
||||
await hass.async_stop()
|
||||
|
||||
hass = await create_hass()
|
||||
async_enable_logging(
|
||||
hass,
|
||||
runtime_config.verbose,
|
||||
runtime_config.log_rotate_days,
|
||||
runtime_config.log_file,
|
||||
runtime_config.log_no_color,
|
||||
)
|
||||
|
||||
hass.config.safe_mode = runtime_config.safe_mode
|
||||
hass.config.skip_pip = runtime_config.skip_pip
|
||||
hass.config.skip_pip_packages = runtime_config.skip_pip_packages
|
||||
if runtime_config.skip_pip or runtime_config.skip_pip_packages:
|
||||
_LOGGER.warning(
|
||||
"Skipping pip installation of required modules. This may cause issues"
|
||||
|
@@ -303,8 +182,7 @@ async def async_setup_hass(
|
|||
|
||||
_LOGGER.info("Config directory: %s", runtime_config.config_dir)
|
||||
|
||||
block_async_io.enable()
|
||||
|
||||
loader.async_setup(hass)
|
||||
config_dict = None
|
||||
basic_setup_success = False
|
||||
|
||||
|
@@ -328,31 +206,29 @@ async def async_setup_hass(
|
|||
|
||||
if config_dict is None:
|
||||
recovery_mode = True
|
||||
await stop_hass(hass)
|
||||
hass = await create_hass()
|
||||
|
||||
elif not basic_setup_success:
|
||||
_LOGGER.warning("Unable to set up core integrations. Activating recovery mode")
|
||||
recovery_mode = True
|
||||
await stop_hass(hass)
|
||||
hass = await create_hass()
|
||||
|
||||
elif any(domain not in hass.config.components for domain in CRITICAL_INTEGRATIONS):
|
||||
_LOGGER.warning(
|
||||
"Detected that %s did not load. Activating recovery mode",
|
||||
",".join(CRITICAL_INTEGRATIONS),
|
||||
)
|
||||
# Ask integrations to shut down. It's messy but we can't
|
||||
# do a clean stop without knowing what is broken
|
||||
with contextlib.suppress(asyncio.TimeoutError):
|
||||
async with hass.timeout.async_timeout(10):
|
||||
await hass.async_stop()
|
||||
|
||||
recovery_mode = True
|
||||
old_config = hass.config
|
||||
old_logging = hass.data.get(DATA_LOGGING)
|
||||
|
||||
recovery_mode = True
|
||||
await stop_hass(hass)
|
||||
hass = await create_hass()
|
||||
|
||||
hass = core.HomeAssistant(old_config.config_dir)
|
||||
if old_logging:
|
||||
hass.data[DATA_LOGGING] = old_logging
|
||||
hass.config.debug = old_config.debug
|
||||
hass.config.skip_pip = old_config.skip_pip
|
||||
hass.config.skip_pip_packages = old_config.skip_pip_packages
|
||||
hass.config.internal_url = old_config.internal_url
|
||||
|
@@ -399,45 +275,35 @@ def open_hass_ui(hass: core.HomeAssistant) -> None:
|
|||
)
|
||||
|
||||
|
||||
def _init_blocking_io_modules_in_executor() -> None:
|
||||
"""Initialize modules that do blocking I/O in executor."""
|
||||
# Cache the result of platform.uname().processor in the executor.
|
||||
# Multiple modules call this function at startup which
|
||||
# executes a blocking subprocess call. This is a problem for the
|
||||
# asyncio event loop. By priming the cache of uname we can
|
||||
# avoid the blocking call in the event loop.
|
||||
_ = platform.uname().processor
|
||||
# Initialize the mimetypes module to avoid blocking calls
|
||||
# to the filesystem to load the mime.types file.
|
||||
mimetypes.init()
|
||||
# Initialize is_official_image and is_docker_env to avoid blocking calls
|
||||
# to the filesystem.
|
||||
is_official_image()
|
||||
is_docker_env()
|
||||
|
||||
|
||||
async def async_load_base_functionality(hass: core.HomeAssistant) -> None:
|
||||
"""Load the registries and modules that will do blocking I/O."""
|
||||
"""Load the registries and cache the result of platform.uname().processor."""
|
||||
if DATA_REGISTRIES_LOADED in hass.data:
|
||||
return
|
||||
hass.data[DATA_REGISTRIES_LOADED] = None
|
||||
translation.async_setup(hass)
|
||||
|
||||
def _cache_uname_processor() -> None:
|
||||
"""Cache the result of platform.uname().processor in the executor.
|
||||
|
||||
Multiple modules call this function at startup which
|
||||
executes a blocking subprocess call. This is a problem for the
|
||||
asyncio event loop. By primeing the cache of uname we can
|
||||
avoid the blocking call in the event loop.
|
||||
"""
|
||||
platform.uname().processor # pylint: disable=expression-not-assigned
|
||||
|
||||
# Load the registries and cache the result of platform.uname().processor
|
||||
entity.async_setup(hass)
|
||||
template.async_setup(hass)
|
||||
await asyncio.gather(
|
||||
create_eager_task(get_internal_store_manager(hass).async_initialize()),
|
||||
create_eager_task(area_registry.async_load(hass)),
|
||||
create_eager_task(category_registry.async_load(hass)),
|
||||
create_eager_task(device_registry.async_load(hass)),
|
||||
create_eager_task(entity_registry.async_load(hass)),
|
||||
create_eager_task(floor_registry.async_load(hass)),
|
||||
create_eager_task(issue_registry.async_load(hass)),
|
||||
create_eager_task(label_registry.async_load(hass)),
|
||||
hass.async_add_executor_job(_init_blocking_io_modules_in_executor),
|
||||
create_eager_task(template.async_load_custom_templates(hass)),
|
||||
create_eager_task(restore_state.async_load(hass)),
|
||||
create_eager_task(hass.config_entries.async_initialize()),
|
||||
create_eager_task(async_get_system_info(hass)),
|
||||
area_registry.async_load(hass),
|
||||
device_registry.async_load(hass),
|
||||
entity_registry.async_load(hass),
|
||||
issue_registry.async_load(hass),
|
||||
label_registry.async_load(hass),
|
||||
hass.async_add_executor_job(_cache_uname_processor),
|
||||
template.async_load_custom_templates(hass),
|
||||
restore_state.async_load(hass),
|
||||
hass.config_entries.async_initialize(),
|
||||
)
|
||||
|
||||
|
||||
|
@@ -452,9 +318,6 @@ async def async_from_config_dict(
|
|||
start = monotonic()
|
||||
|
||||
hass.config_entries = config_entries.ConfigEntries(hass, config)
|
||||
# Prime custom component cache early so we know if registry entries are tied
|
||||
# to a custom integration
|
||||
await loader.async_get_custom_components(hass)
|
||||
await async_load_base_functionality(hass)
|
||||
|
||||
# Set up core.
|
||||
|
@@ -463,11 +326,7 @@ async def async_from_config_dict(
|
|||
if not all(
|
||||
await asyncio.gather(
|
||||
*(
|
||||
create_eager_task(
|
||||
async_setup_component(hass, domain, config),
|
||||
name=f"bootstrap setup {domain}",
|
||||
loop=hass.loop,
|
||||
)
|
||||
async_setup_component(hass, domain, config)
|
||||
for domain in CORE_INTEGRATIONS
|
||||
)
|
||||
)
|
||||
|
@@ -480,7 +339,7 @@ async def async_from_config_dict(
|
|||
core_config = config.get(core.DOMAIN, {})
|
||||
|
||||
try:
|
||||
await async_process_ha_core_config(hass, core_config)
|
||||
await conf_util.async_process_ha_core_config(hass, core_config)
|
||||
except vol.Invalid as config_err:
|
||||
conf_util.async_log_schema_error(config_err, core.DOMAIN, core_config, hass)
|
||||
async_notify_setup_error(hass, core.DOMAIN)
|
||||
|
@@ -515,7 +374,7 @@ async def async_from_config_dict(
|
|||
issue_registry.async_create_issue(
|
||||
hass,
|
||||
core.DOMAIN,
|
||||
f"python_version_{required_python_version}",
|
||||
"python_version",
|
||||
is_fixable=False,
|
||||
severity=issue_registry.IssueSeverity.WARNING,
|
||||
breaks_in_ha_version=REQUIRED_NEXT_PYTHON_HA_RELEASE,
|
||||
|
@@ -530,7 +389,8 @@ async def async_from_config_dict(
|
|||
return hass
|
||||
|
||||
|
||||
async def async_enable_logging(
|
||||
@core.callback
|
||||
def async_enable_logging(
|
||||
hass: core.HomeAssistant,
|
||||
verbose: bool = False,
|
||||
log_rotate_days: int | None = None,
|
||||
|
@@ -587,10 +447,10 @@ async def async_enable_logging(
|
|||
logging.getLogger("aiohttp.access").setLevel(logging.WARNING)
|
||||
logging.getLogger("httpx").setLevel(logging.WARNING)
|
||||
|
||||
sys.excepthook = lambda *args: logging.getLogger().exception(
|
||||
sys.excepthook = lambda *args: logging.getLogger(None).exception(
|
||||
"Uncaught exception", exc_info=args
|
||||
)
|
||||
threading.excepthook = lambda args: logging.getLogger().exception(
|
||||
threading.excepthook = lambda args: logging.getLogger(None).exception(
|
||||
"Uncaught thread exception",
|
||||
exc_info=( # type: ignore[arg-type]
|
||||
args.exc_type,
|
||||
|
@@ -613,13 +473,28 @@ async def async_enable_logging(
|
|||
if (err_path_exists and os.access(err_log_path, os.W_OK)) or (
|
||||
not err_path_exists and os.access(err_dir, os.W_OK)
|
||||
):
|
||||
err_handler = await hass.async_add_executor_job(
|
||||
_create_log_file, err_log_path, log_rotate_days
|
||||
err_handler: (
|
||||
logging.handlers.RotatingFileHandler
|
||||
| logging.handlers.TimedRotatingFileHandler
|
||||
)
|
||||
if log_rotate_days:
|
||||
err_handler = logging.handlers.TimedRotatingFileHandler(
|
||||
err_log_path, when="midnight", backupCount=log_rotate_days
|
||||
)
|
||||
else:
|
||||
err_handler = logging.handlers.RotatingFileHandler(
|
||||
err_log_path, backupCount=1
|
||||
)
|
||||
|
||||
try:
|
||||
err_handler.doRollover()
|
||||
except OSError as err:
|
||||
_LOGGER.error("Error rolling over log file: %s", err)
|
||||
|
||||
err_handler.setLevel(logging.INFO if verbose else logging.WARNING)
|
||||
err_handler.setFormatter(logging.Formatter(fmt, datefmt=FORMAT_DATETIME))
|
||||
|
||||
logger = logging.getLogger()
|
||||
logger = logging.getLogger("")
|
||||
logger.addHandler(err_handler)
|
||||
logger.setLevel(logging.INFO if verbose else logging.WARNING)
|
||||
|
||||
|
@@ -631,41 +506,6 @@ async def async_enable_logging(
|
|||
async_activate_log_queue_handler(hass)
|
||||
|
||||
|
||||
def _create_log_file(
|
||||
err_log_path: str, log_rotate_days: int | None
|
||||
) -> RotatingFileHandler | TimedRotatingFileHandler:
|
||||
"""Create log file and do roll over."""
|
||||
err_handler: RotatingFileHandler | TimedRotatingFileHandler
|
||||
if log_rotate_days:
|
||||
err_handler = TimedRotatingFileHandler(
|
||||
err_log_path, when="midnight", backupCount=log_rotate_days
|
||||
)
|
||||
else:
|
||||
err_handler = _RotatingFileHandlerWithoutShouldRollOver(
|
||||
err_log_path, backupCount=1
|
||||
)
|
||||
|
||||
try:
|
||||
err_handler.doRollover()
|
||||
except OSError as err:
|
||||
_LOGGER.error("Error rolling over log file: %s", err)
|
||||
|
||||
return err_handler
|
||||
|
||||
|
||||
class _RotatingFileHandlerWithoutShouldRollOver(RotatingFileHandler):
|
||||
"""RotatingFileHandler that does not check if it should roll over on every log."""
|
||||
|
||||
def shouldRollover(self, record: logging.LogRecord) -> bool:
|
||||
"""Never roll over.
|
||||
|
||||
The shouldRollover check is expensive because it has to stat
|
||||
the log file for every log record. Since we do not set maxBytes
|
||||
the result of this check is always False.
|
||||
"""
|
||||
return False
|
||||
|
||||
|
||||
async def async_mount_local_lib_path(config_dir: str) -> str:
|
||||
"""Add local library to Python Path.
|
||||
|
||||
|
@@ -695,79 +535,42 @@ def _get_domains(hass: core.HomeAssistant, config: dict[str, Any]) -> set[str]:
|
|||
# Add domains depending on if the Supervisor is used or not
|
||||
if "SUPERVISOR" in os.environ:
|
||||
domains.update(DEFAULT_INTEGRATIONS_SUPERVISOR)
|
||||
else:
|
||||
domains.update(DEFAULT_INTEGRATIONS_NON_SUPERVISOR)
|
||||
|
||||
return domains
|
||||
|
||||
|
||||
class _WatchPendingSetups:
|
||||
"""Periodic log and dispatch of setups that are pending."""
|
||||
async def _async_watch_pending_setups(hass: core.HomeAssistant) -> None:
|
||||
"""Periodic log of setups that are pending.
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: core.HomeAssistant,
|
||||
setup_started: dict[tuple[str, str | None], float],
|
||||
) -> None:
|
||||
"""Initialize the WatchPendingSetups class."""
|
||||
self._hass = hass
|
||||
self._setup_started = setup_started
|
||||
self._duration_count = 0
|
||||
self._handle: asyncio.TimerHandle | None = None
|
||||
self._previous_was_empty = True
|
||||
self._loop = hass.loop
|
||||
Pending for longer than LOG_SLOW_STARTUP_INTERVAL.
|
||||
"""
|
||||
loop_count = 0
|
||||
setup_started: dict[str, datetime] = hass.data[DATA_SETUP_STARTED]
|
||||
previous_was_empty = True
|
||||
while True:
|
||||
now = dt_util.utcnow()
|
||||
remaining_with_setup_started = {
|
||||
domain: (now - setup_started[domain]).total_seconds()
|
||||
for domain in setup_started
|
||||
}
|
||||
_LOGGER.debug("Integration remaining: %s", remaining_with_setup_started)
|
||||
if remaining_with_setup_started or not previous_was_empty:
|
||||
async_dispatcher_send(
|
||||
hass, SIGNAL_BOOTSTRAP_INTEGRATIONS, remaining_with_setup_started
|
||||
)
|
||||
previous_was_empty = not remaining_with_setup_started
|
||||
await asyncio.sleep(SLOW_STARTUP_CHECK_INTERVAL)
|
||||
loop_count += SLOW_STARTUP_CHECK_INTERVAL
|
||||
|
||||
def _async_watch(self) -> None:
|
||||
"""Periodic log of setups that are pending."""
|
||||
now = monotonic()
|
||||
self._duration_count += SLOW_STARTUP_CHECK_INTERVAL
|
||||
|
||||
remaining_with_setup_started: defaultdict[str, float] = defaultdict(float)
|
||||
for integration_group, start_time in self._setup_started.items():
|
||||
domain, _ = integration_group
|
||||
remaining_with_setup_started[domain] += now - start_time
|
||||
|
||||
if remaining_with_setup_started:
|
||||
_LOGGER.debug("Integration remaining: %s", remaining_with_setup_started)
|
||||
elif waiting_tasks := self._hass._active_tasks: # noqa: SLF001
|
||||
_LOGGER.debug("Waiting on tasks: %s", waiting_tasks)
|
||||
self._async_dispatch(remaining_with_setup_started)
|
||||
if (
|
||||
self._setup_started
|
||||
and self._duration_count % LOG_SLOW_STARTUP_INTERVAL == 0
|
||||
):
|
||||
# We log every LOG_SLOW_STARTUP_INTERVAL until all integrations are done
|
||||
# once we take over LOG_SLOW_STARTUP_INTERVAL (60s) to start up
|
||||
if loop_count >= LOG_SLOW_STARTUP_INTERVAL and setup_started:
|
||||
_LOGGER.warning(
|
||||
"Waiting on integrations to complete setup: %s",
|
||||
self._setup_started,
|
||||
", ".join(setup_started),
|
||||
)
|
||||
|
||||
_LOGGER.debug("Running timeout Zones: %s", self._hass.timeout.zones)
|
||||
self._async_schedule_next()
|
||||
|
||||
def _async_dispatch(self, remaining_with_setup_started: dict[str, float]) -> None:
|
||||
"""Dispatch the signal."""
|
||||
if remaining_with_setup_started or not self._previous_was_empty:
|
||||
async_dispatcher_send_internal(
|
||||
self._hass, SIGNAL_BOOTSTRAP_INTEGRATIONS, remaining_with_setup_started
|
||||
)
|
||||
self._previous_was_empty = not remaining_with_setup_started
|
||||
|
||||
def _async_schedule_next(self) -> None:
|
||||
"""Schedule the next call."""
|
||||
self._handle = self._loop.call_later(
|
||||
SLOW_STARTUP_CHECK_INTERVAL, self._async_watch
|
||||
)
|
||||
|
||||
def async_start(self) -> None:
|
||||
"""Start watching."""
|
||||
self._async_schedule_next()
|
||||
|
||||
def async_stop(self) -> None:
|
||||
"""Stop watching."""
|
||||
self._async_dispatch({})
|
||||
if self._handle:
|
||||
self._handle.cancel()
|
||||
self._handle = None
|
||||
loop_count = 0
|
||||
_LOGGER.debug("Running timeout Zones: %s", hass.timeout.zones)
|
||||
|
||||
|
||||
async def async_setup_multi_components(
|
||||
|
@@ -778,18 +581,11 @@ async def async_setup_multi_components(
|
|||
"""Set up multiple domains. Log on failure."""
|
||||
# Avoid creating tasks for domains that were setup in a previous stage
|
||||
domains_not_yet_setup = domains - hass.config.components
|
||||
# Create setup tasks for base platforms first since everything will have
|
||||
# to wait to be imported, and the sooner we can get the base platforms
|
||||
# loaded the sooner we can start loading the rest of the integrations.
|
||||
futures = {
|
||||
domain: hass.async_create_task_internal(
|
||||
async_setup_component(hass, domain, config),
|
||||
f"setup component {domain}",
|
||||
eager_start=True,
|
||||
)
|
||||
for domain in sorted(
|
||||
domains_not_yet_setup, key=SETUP_ORDER_SORT_KEY, reverse=True
|
||||
domain: hass.async_create_task(
|
||||
async_setup_component(hass, domain, config), f"setup component {domain}"
|
||||
)
|
||||
for domain in domains_not_yet_setup
|
||||
}
|
||||
results = await asyncio.gather(*futures.values(), return_exceptions=True)
|
||||
for idx, domain in enumerate(futures):
|
||||
|
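
async_setup_multi_components above fans out one setup task per domain and gathers them with return_exceptions=True, so a single failing integration cannot abort the rest. A stand-alone sketch of that pattern (names are illustrative):

import asyncio

async def setup_all(domains: list[str], setup) -> dict[str, bool]:
    # One task per domain; exceptions are collected instead of raised.
    tasks = {domain: asyncio.ensure_future(setup(domain)) for domain in domains}
    results = await asyncio.gather(*tasks.values(), return_exceptions=True)
    ok: dict[str, bool] = {}
    for domain, result in zip(tasks, results, strict=True):
        if isinstance(result, BaseException):
            print(f"Error setting up {domain}: {result!r}")
            ok[domain] = False
        else:
            ok[domain] = bool(result)
    return ok

# Example: asyncio.run(setup_all(["sensor", "light"], my_setup_coroutine))
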
@@ -802,121 +598,69 @@ async def async_setup_multi_components(
|
|||
)
|
||||
|
||||
|
||||
async def _async_resolve_domains_to_setup(
|
||||
async def _async_set_up_integrations(
|
||||
hass: core.HomeAssistant, config: dict[str, Any]
|
||||
) -> tuple[set[str], dict[str, loader.Integration]]:
|
||||
"""Resolve all dependencies and return list of domains to set up."""
|
||||
) -> None:
|
||||
"""Set up all the integrations."""
|
||||
hass.data[DATA_SETUP_STARTED] = {}
|
||||
setup_time: dict[str, timedelta] = hass.data.setdefault(DATA_SETUP_TIME, {})
|
||||
|
||||
watch_task = asyncio.create_task(_async_watch_pending_setups(hass))
|
||||
|
||||
domains_to_setup = _get_domains(hass, config)
|
||||
|
||||
needed_requirements: set[str] = set()
|
||||
platform_integrations = conf_util.extract_platform_integrations(
|
||||
config, BASE_PLATFORMS
|
||||
)
|
||||
# Ensure base platforms that have platform integrations are added to
|
||||
# to `domains_to_setup so they can be setup first instead of
|
||||
# discovering them when later when a config entry setup task
|
||||
# notices its needed and there is already a long line to use
|
||||
# the import executor.
|
||||
#
|
||||
# For example if we have
|
||||
# sensor:
|
||||
# - platform: template
|
||||
#
|
||||
# `template` has to be loaded to validate the config for sensor
|
||||
# so we want to start loading `sensor` as soon as we know
|
||||
# it will be needed. The more platforms under `sensor:`, the longer
|
||||
# it will take to finish setup for `sensor` because each of these
|
||||
# platforms has to be imported before we can validate the config.
|
||||
#
|
||||
# Thankfully we are migrating away from the platform pattern
|
||||
# so this will be less of a problem in the future.
|
||||
domains_to_setup.update(platform_integrations)
|
||||
|
||||
# Load manifests for base platforms and platform based integrations
|
||||
# that are defined under base platforms right away since we do not require
|
||||
# the manifest to list them as dependencies and we want to avoid the lock
|
||||
# contention when multiple integrations try to load them at once
|
||||
additional_manifests_to_load = {
|
||||
*BASE_PLATFORMS,
|
||||
*chain.from_iterable(platform_integrations.values()),
|
||||
}
|
||||
|
||||
translations_to_load = additional_manifests_to_load.copy()
|
||||
|
||||
# Resolve all dependencies so we know all integrations
|
||||
# that will have to be loaded and start right-away
|
||||
# that will have to be loaded and start rightaway
|
||||
integration_cache: dict[str, loader.Integration] = {}
|
||||
to_resolve: set[str] = domains_to_setup
|
||||
while to_resolve or additional_manifests_to_load:
|
||||
while to_resolve:
|
||||
old_to_resolve: set[str] = to_resolve
|
||||
to_resolve = set()
|
||||
|
||||
if additional_manifests_to_load:
|
||||
to_get = {*old_to_resolve, *additional_manifests_to_load}
|
||||
additional_manifests_to_load.clear()
|
||||
else:
|
||||
to_get = old_to_resolve
|
||||
integrations_to_process = [
|
||||
int_or_exc
|
||||
for int_or_exc in (
|
||||
await loader.async_get_integrations(hass, old_to_resolve)
|
||||
).values()
|
||||
if isinstance(int_or_exc, loader.Integration)
|
||||
]
|
||||
|
||||
manifest_deps: set[str] = set()
|
||||
resolve_dependencies_tasks: list[asyncio.Task[bool]] = []
|
||||
integrations_to_process: list[loader.Integration] = []
|
||||
|
||||
for domain, itg in (await loader.async_get_integrations(hass, to_get)).items():
|
||||
if not isinstance(itg, loader.Integration):
|
||||
continue
|
||||
integration_cache[domain] = itg
|
||||
needed_requirements.update(itg.requirements)
|
||||
|
||||
# Make sure manifests for dependencies are loaded in the next
|
||||
# loop to try to group as many as manifest loads in a single
|
||||
# call to avoid the creating one-off executor jobs later in
|
||||
# the setup process
|
||||
additional_manifests_to_load.update(
|
||||
dep
|
||||
for dep in chain(itg.dependencies, itg.after_dependencies)
|
||||
if dep not in integration_cache
|
||||
)
|
||||
|
||||
if domain not in old_to_resolve:
|
||||
continue
|
||||
|
||||
integrations_to_process.append(itg)
|
||||
for itg in integrations_to_process:
|
||||
manifest_deps.update(itg.dependencies)
|
||||
manifest_deps.update(itg.after_dependencies)
|
||||
if not itg.all_dependencies_resolved:
|
||||
resolve_dependencies_tasks.append(
|
||||
create_eager_task(
|
||||
itg.resolve_dependencies(),
|
||||
name=f"resolve dependencies {domain}",
|
||||
loop=hass.loop,
|
||||
)
|
||||
)
|
||||
needed_requirements.update(itg.requirements)
|
||||
|
||||
if unseen_deps := manifest_deps - integration_cache.keys():
|
||||
if manifest_deps:
|
||||
# If there are dependencies, try to preload all
|
||||
# the integrations manifest at once and add them
|
||||
# to the list of requirements we need to install
|
||||
# so we can try to check if they are already installed
|
||||
# in a single call below which avoids each integration
|
||||
# having to wait for the lock to do it individually
|
||||
deps = await loader.async_get_integrations(hass, unseen_deps)
|
||||
for dependant_domain, dependant_itg in deps.items():
|
||||
deps = await loader.async_get_integrations(hass, manifest_deps)
|
||||
for dependant_itg in deps.values():
|
||||
if isinstance(dependant_itg, loader.Integration):
|
||||
integration_cache[dependant_domain] = dependant_itg
|
||||
needed_requirements.update(dependant_itg.requirements)
|
||||
|
||||
resolve_dependencies_tasks = [
|
||||
itg.resolve_dependencies()
|
||||
for itg in integrations_to_process
|
||||
if not itg.all_dependencies_resolved
|
||||
]
|
||||
|
||||
if resolve_dependencies_tasks:
|
||||
await asyncio.gather(*resolve_dependencies_tasks)
|
||||
|
||||
for itg in integrations_to_process:
|
||||
try:
|
||||
all_deps = itg.all_dependencies
|
||||
except RuntimeError:
|
||||
# Integration.all_dependencies raises RuntimeError if
|
||||
# dependencies could not be resolved
|
||||
continue
|
||||
for dep in all_deps:
|
||||
integration_cache[itg.domain] = itg
|
||||
|
||||
for dep in itg.all_dependencies:
|
||||
if dep in domains_to_setup:
|
||||
continue
|
||||
|
||||
domains_to_setup.add(dep)
|
||||
to_resolve.add(dep)
|
||||
|
||||
|
@@ -928,62 +672,31 @@ async def _async_resolve_domains_to_setup(
|
|||
hass.async_create_background_task(
|
||||
requirements.async_load_installed_versions(hass, needed_requirements),
|
||||
"check installed requirements",
|
||||
eager_start=True,
|
||||
)
|
||||
|
||||
#
|
||||
# Only add the domains_to_setup after we finish resolving
|
||||
# as new domains are likely to added in the process
|
||||
#
|
||||
translations_to_load.update(domains_to_setup)
|
||||
# Start loading translations for all integrations we are going to set up
|
||||
# in the background so they are ready when we need them. This avoids a
|
||||
# lot of waiting for the translation load lock and a thundering herd of
|
||||
# tasks trying to load the same translations at the same time as each
|
||||
# integration is loaded.
|
||||
#
|
||||
# We do not wait for this since as soon as the task runs it will
|
||||
# hold the translation load lock and if anything is fast enough to
|
||||
# wait for the translation load lock, loading will be done by the
|
||||
# time it gets to it.
|
||||
hass.async_create_background_task(
|
||||
translation.async_load_integrations(hass, translations_to_load),
|
||||
"load translations",
|
||||
eager_start=True,
|
||||
)
|
||||
|
||||
# Preload storage for all integrations we are going to set up
|
||||
# so we do not have to wait for it to be loaded when we need it
|
||||
# in the setup process.
|
||||
hass.async_create_background_task(
|
||||
get_internal_store_manager(hass).async_preload(
|
||||
[*PRELOAD_STORAGE, *domains_to_setup]
|
||||
),
|
||||
"preload storage",
|
||||
eager_start=True,
|
||||
)
|
||||
|
||||
return domains_to_setup, integration_cache
|
||||
|
||||
|
||||
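Aside, not part of this diff: the background tasks above are fire-and-forget preloads. The sketch below shows the same idea with plain asyncio rather than the hass.async_create_background_task API; the cache name and sleep duration are invented for the example.

import asyncio

translation_cache: dict[str, str] = {}

async def preload_translations(domains: set[str]) -> None:
    # Simulate slow translation loading; real code would read from disk.
    await asyncio.sleep(0.1)
    translation_cache.update({domain: f"strings for {domain}" for domain in domains})

async def bootstrap() -> None:
    # Fire-and-forget: start the preload but keep going with setup work.
    preload = asyncio.create_task(preload_translations({"light", "switch"}))
    # ... integration setup would happen here ...
    await preload  # awaited only so this standalone example exits cleanly
    print(translation_cache)

asyncio.run(bootstrap())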
async def _async_set_up_integrations(
    hass: core.HomeAssistant, config: dict[str, Any]
) -> None:
    """Set up all the integrations."""
    watcher = _WatchPendingSetups(hass, _setup_started(hass))
    watcher.async_start()

    domains_to_setup, integration_cache = await _async_resolve_domains_to_setup(
        hass, config
    )

    # Initialize recorder
    if "recorder" in domains_to_setup:
        recorder.async_initialize_recorder(hass)

    pre_stage_domains = [
        (name, domains_to_setup & domain_group) for name, domain_group in SETUP_ORDER
    ]
    # Load logging as soon as possible
    if logging_domains := domains_to_setup & LOGGING_INTEGRATIONS:
        _LOGGER.info("Setting up logging: %s", logging_domains)
        await async_setup_multi_components(hass, logging_domains, config)

    # Setup frontend
    if frontend_domains := domains_to_setup & FRONTEND_INTEGRATIONS:
        _LOGGER.info("Setting up frontend: %s", frontend_domains)
        await async_setup_multi_components(hass, frontend_domains, config)

    # Setup recorder
    if recorder_domains := domains_to_setup & RECORDER_INTEGRATIONS:
        _LOGGER.info("Setting up recorder: %s", recorder_domains)
        await async_setup_multi_components(hass, recorder_domains, config)

    # Start up debuggers. Start these first in case they want to wait.
    if debuggers := domains_to_setup & DEBUGGER_INTEGRATIONS:
        _LOGGER.debug("Setting up debuggers: %s", debuggers)
        await async_setup_multi_components(hass, debuggers, config)

    # calculate what components to setup in what stage
    stage_1_domains: set[str] = set()
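Aside, not part of this diff: both code paths above intersect the resolved domain set with per-stage frozensets and set up each intersection in order. A tiny runnable sketch of that set arithmetic; the group contents and domain names below are assumptions, not the real constants.

LOGGING_INTEGRATIONS = frozenset({"logger", "system_log"})   # assumed contents
FRONTEND_INTEGRATIONS = frozenset({"frontend"})              # assumed contents

domains_to_setup = {"logger", "frontend", "light", "mqtt"}

if logging_domains := domains_to_setup & LOGGING_INTEGRATIONS:
    print("logging stage:", logging_domains)
if frontend_domains := domains_to_setup & FRONTEND_INTEGRATIONS:
    print("frontend stage:", frontend_domains)

# Whatever no early stage claimed is left for stage 1/2 later on.
print("left over:", domains_to_setup - logging_domains - frontend_domains)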
@@ -1007,21 +720,14 @@ async def _async_set_up_integrations(

            deps_promotion.update(dep_itg.all_dependencies)

    stage_2_domains = domains_to_setup - stage_1_domains

    for name, domain_group in pre_stage_domains:
        if domain_group:
            stage_2_domains -= domain_group
            _LOGGER.info("Setting up %s: %s", name, domain_group)
            to_be_loaded = domain_group.copy()
            to_be_loaded.update(
                dep
                for domain in domain_group
                if (integration := integration_cache.get(domain)) is not None
                for dep in integration.all_dependencies
            )
            async_set_domains_to_be_loaded(hass, to_be_loaded)
            await async_setup_multi_components(hass, domain_group, config)
    stage_2_domains = (
        domains_to_setup
        - logging_domains
        - frontend_domains
        - recorder_domains
        - debuggers
        - stage_1_domains
    )

    # Enables after dependencies when setting up stage 1 domains
    async_set_domains_to_be_loaded(hass, stage_1_domains)
@@ -1034,11 +740,8 @@ async def _async_set_up_integrations(
            STAGE_1_TIMEOUT, cool_down=COOLDOWN_TIME
        ):
            await async_setup_multi_components(hass, stage_1_domains, config)
    except TimeoutError:
        _LOGGER.warning(
            "Setup timed out for stage 1 waiting on %s - moving forward",
            hass._active_tasks,  # noqa: SLF001
        )
    except asyncio.TimeoutError:
        _LOGGER.warning("Setup timed out for stage 1 - moving forward")

    # Add after dependencies when setting up stage 2 domains
    async_set_domains_to_be_loaded(hass, stage_2_domains)
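Aside, not part of this diff: the stage timeouts above bound how long setup may run and then continue with a warning. A minimal sketch of that pattern with plain asyncio (requires Python 3.11+ for asyncio.timeout); the domain names and durations are invented.

import asyncio

async def set_up_stage(domains: set[str]) -> None:
    # Stand-in for async_setup_multi_components(); sleeps to simulate slow setup.
    await asyncio.sleep(0.5)

async def main() -> None:
    stage_1_domains = {"network", "zeroconf"}
    try:
        # Bound the stage, then move forward no matter what finished.
        async with asyncio.timeout(0.1):
            await set_up_stage(stage_1_domains)
    except TimeoutError:
        print("Setup timed out for stage 1 - moving forward")

asyncio.run(main())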
@@ -1050,28 +753,26 @@ async def _async_set_up_integrations(
            STAGE_2_TIMEOUT, cool_down=COOLDOWN_TIME
        ):
            await async_setup_multi_components(hass, stage_2_domains, config)
    except TimeoutError:
        _LOGGER.warning(
            "Setup timed out for stage 2 waiting on %s - moving forward",
            hass._active_tasks,  # noqa: SLF001
        )
    except asyncio.TimeoutError:
        _LOGGER.warning("Setup timed out for stage 2 - moving forward")

    # Wrap up startup
    _LOGGER.debug("Waiting for startup to wrap up")
    try:
        async with hass.timeout.async_timeout(WRAP_UP_TIMEOUT, cool_down=COOLDOWN_TIME):
            await hass.async_block_till_done()
    except TimeoutError:
        _LOGGER.warning(
            "Setup timed out for bootstrap waiting on %s - moving forward",
            hass._active_tasks,  # noqa: SLF001
        )
    except asyncio.TimeoutError:
        _LOGGER.warning("Setup timed out for bootstrap - moving forward")

    watcher.async_stop()
    watch_task.cancel()
    async_dispatcher_send(hass, SIGNAL_BOOTSTRAP_INTEGRATIONS, {})

    if _LOGGER.isEnabledFor(logging.DEBUG):
        setup_time = async_get_setup_timings(hass)
        _LOGGER.debug(
            "Integration setup times: %s",
            dict(sorted(setup_time.items(), key=itemgetter(1), reverse=True)),
        )
        _LOGGER.debug(
            "Integration setup times: %s",
            {
                integration: timedelta.total_seconds()
                for integration, timedelta in sorted(
                    setup_time.items(), key=lambda item: item[1].total_seconds()
                )
            },
        )
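Aside, not part of this diff: both sides of the hunk above sort a dict of per-integration timedeltas before logging it. A self-contained sketch of the two orderings; the integration names and durations are made up.

from datetime import timedelta
from operator import itemgetter

setup_time = {  # invented numbers, purely for illustration
    "recorder": timedelta(seconds=2.4),
    "http": timedelta(seconds=0.3),
    "zha": timedelta(seconds=5.1),
}

# Slowest first, as in the dict(sorted(...)) form above.
print(dict(sorted(setup_time.items(), key=itemgetter(1), reverse=True)))

# Seconds per integration, fastest first, as in the comprehension form above.
print({
    domain: td.total_seconds()
    for domain, td in sorted(setup_time.items(), key=lambda item: item[1].total_seconds())
})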
homeassistant/brands/ambient_weather.json (deleted)
@@ -1,5 +0,0 @@
{
  "domain": "ambient_weather",
  "name": "Ambient Weather",
  "integrations": ["ambient_network", "ambient_station"]
}

homeassistant/brands/aqara.json (deleted)
@@ -1,5 +0,0 @@
{
  "domain": "aqara",
  "name": "Aqara",
  "iot_standards": ["matter", "zigbee"]
}

homeassistant/brands/asterisk.json (Normal file, 5 lines)
@@ -0,0 +1,5 @@
{
  "domain": "asterisk",
  "name": "Asterisk",
  "integrations": ["asterisk_cdr", "asterisk_mbox"]
}

homeassistant/brands/epson.json (Normal file, 5 lines)
@@ -0,0 +1,5 @@
{
  "domain": "epson",
  "name": "Epson",
  "integrations": ["epson", "epsonworkforce"]
}

homeassistant/brands/eq3.json
@@ -1,5 +1,5 @@
{
  "domain": "eq3",
  "name": "eQ-3",
  "integrations": ["maxcube", "eq3btsmart"]
  "integrations": ["maxcube"]
}

homeassistant/brands/fujitsu.json (deleted)
@@ -1,5 +0,0 @@
{
  "domain": "fujitsu",
  "name": "Fujitsu",
  "integrations": ["fujitsu_anywair", "fujitsu_fglair"]
}

homeassistant/brands/google.json
@@ -5,10 +5,10 @@
    "google_assistant",
    "google_assistant_sdk",
    "google_cloud",
    "google_domains",
    "google_generative_ai_conversation",
    "google_mail",
    "google_maps",
    "google_photos",
    "google_pubsub",
    "google_sheets",
    "google_tasks",

homeassistant/brands/husqvarna.json (deleted)
@@ -1,5 +0,0 @@
{
  "domain": "husqvarna",
  "name": "Husqvarna",
  "integrations": ["husqvarna_automower", "husqvarna_automower_ble"]
}

homeassistant/brands/lg.json
@@ -1,5 +1,5 @@
{
  "domain": "lg",
  "name": "LG",
  "integrations": ["lg_netcast", "lg_soundbar", "lg_thinq", "webostv"]
  "integrations": ["lg_netcast", "lg_soundbar", "webostv"]
}

homeassistant/brands/logitech.json
@@ -1,5 +1,5 @@
{
  "domain": "logitech",
  "name": "Logitech",
  "integrations": ["harmony", "squeezebox"]
  "integrations": ["harmony", "ue_smart_radio", "squeezebox"]
}

homeassistant/brands/motionblinds.json (deleted)
@@ -1,5 +0,0 @@
{
  "domain": "motionblinds",
  "name": "Motionblinds",
  "integrations": ["motion_blinds", "motionblinds_ble"]
}

homeassistant/brands/roth.json (deleted)
@@ -1,5 +0,0 @@
{
  "domain": "roth",
  "name": "Roth",
  "integrations": ["touchline", "touchline_sl"]
}

homeassistant/brands/ruuvi.json (deleted)
@@ -1,5 +0,0 @@
{
  "domain": "ruuvi",
  "name": "Ruuvi",
  "integrations": ["ruuvi_gateway", "ruuvitag_ble"]
}

homeassistant/brands/sky.json (deleted)
@@ -1,5 +0,0 @@
{
  "domain": "sky",
  "name": "Sky",
  "integrations": ["sky_hub", "sky_remote"]
}

homeassistant/brands/tesla.json
@@ -1,5 +1,5 @@
{
  "domain": "tesla",
  "name": "Tesla",
  "integrations": ["powerwall", "tesla_wall_connector", "tesla_fleet"]
  "integrations": ["powerwall", "tesla_wall_connector"]
}

homeassistant/brands/tplink.json
@@ -1,6 +1,6 @@
{
  "domain": "tplink",
  "name": "TP-Link",
  "integrations": ["tplink", "tplink_omada", "tplink_lte", "tplink_tapo"],
  "integrations": ["tplink", "tplink_omada", "tplink_lte"],
  "iot_standards": ["matter"]
}

homeassistant/brands/weatherflow.json (deleted)
@@ -1,5 +0,0 @@
{
  "domain": "weatherflow",
  "name": "WeatherFlow",
  "integrations": ["weatherflow", "weatherflow_cloud"]
}

homeassistant/brands/yale.json
@@ -1,11 +1,5 @@
{
  "domain": "yale",
  "name": "Yale",
  "integrations": [
    "august",
    "yale_smart_alarm",
    "yalexs_ble",
    "yale_home",
    "yale"
  ]
  "integrations": ["august", "yale_smart_alarm", "yalexs_ble", "yale_home"]
}
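Aside, not part of this diff: a brand file is a small JSON document mapping one brand to its integrations or IoT standards. A minimal sketch of reading one; the literal below is copied from the asterisk brand added above.

import json

raw = '{"domain": "asterisk", "name": "Asterisk", "integrations": ["asterisk_cdr", "asterisk_mbox"]}'
brand = json.loads(raw)
print(f"{brand['name']} ({brand['domain']}): {', '.join(brand['integrations'])}")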
homeassistant/components/__init__.py
@@ -6,3 +6,41 @@ Component design guidelines:
  format "<DOMAIN>.<OBJECT_ID>".
- Each component should publish services only under its own domain.
"""
from __future__ import annotations

import logging

from homeassistant.core import HomeAssistant, split_entity_id
from homeassistant.helpers.group import expand_entity_ids

_LOGGER = logging.getLogger(__name__)


def is_on(hass: HomeAssistant, entity_id: str | None = None) -> bool:
    """Load up the module to call the is_on method.

    If there is no entity id given we will check all.
    """
    if entity_id:
        entity_ids = expand_entity_ids(hass, [entity_id])
    else:
        entity_ids = hass.states.entity_ids()

    for ent_id in entity_ids:
        domain = split_entity_id(ent_id)[0]

        try:
            component = getattr(hass.components, domain)

        except ImportError:
            _LOGGER.error("Failed to call %s.is_on: component not found", domain)
            continue

        if not hasattr(component, "is_on"):
            _LOGGER.warning("Integration %s has no is_on method", domain)
            continue

        if component.is_on(ent_id):
            return True

    return False
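Aside, not part of this diff: the helper above dispatches on the entity's domain, looks up that component dynamically, and skips anything without an is_on method. A self-contained sketch of the same dispatch idea using a fake component namespace; everything named here is invented for illustration.

from types import SimpleNamespace

light = SimpleNamespace(is_on=lambda entity_id: entity_id == "light.kitchen")
components = SimpleNamespace(light=light)  # stand-in for hass.components

def any_on(entity_ids: list[str]) -> bool:
    for ent_id in entity_ids:
        domain = ent_id.split(".", 1)[0]
        component = getattr(components, domain, None)
        if component is None or not hasattr(component, "is_on"):
            continue  # unknown domain or no is_on helper: skip, as above
        if component.is_on(ent_id):
            return True
    return False

print(any_on(["switch.fan", "light.kitchen"]))  # True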
@ -1,13 +1,12 @@
|
|||
"""Support for the Abode Security System."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass, field
|
||||
from functools import partial
|
||||
from pathlib import Path
|
||||
|
||||
from jaraco.abode.automation import Automation as AbodeAuto
|
||||
from jaraco.abode.client import Client as Abode
|
||||
import jaraco.abode.config
|
||||
from jaraco.abode.devices.base import Device as AbodeDev
|
||||
from jaraco.abode.exceptions import (
|
||||
AuthenticationException as AbodeAuthenticationException,
|
||||
Exception as AbodeException,
|
||||
|
@ -29,11 +28,11 @@ from homeassistant.const import (
|
|||
)
|
||||
from homeassistant.core import CALLBACK_TYPE, Event, HomeAssistant, ServiceCall
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers import config_validation as cv, entity
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.dispatcher import dispatcher_send
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import CONF_POLLING, DOMAIN, LOGGER
|
||||
from .const import ATTRIBUTION, CONF_POLLING, DOMAIN, LOGGER
|
||||
|
||||
SERVICE_SETTINGS = "change_setting"
|
||||
SERVICE_CAPTURE_IMAGE = "capture_image"
|
||||
|
@ -83,21 +82,12 @@ class AbodeSystem:
|
|||
logout_listener: CALLBACK_TYPE | None = None
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the Abode component."""
|
||||
setup_hass_services(hass)
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up Abode integration from a config entry."""
|
||||
username = entry.data[CONF_USERNAME]
|
||||
password = entry.data[CONF_PASSWORD]
|
||||
polling = entry.data[CONF_POLLING]
|
||||
|
||||
# Configure abode library to use config directory for storing data
|
||||
jaraco.abode.config.paths.override(user_data=Path(hass.config.path("Abode")))
|
||||
|
||||
# For previous config entries where unique_id is None
|
||||
if entry.unique_id is None:
|
||||
hass.config_entries.async_update_entry(
|
||||
|
@ -120,6 +110,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
|||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
await setup_hass_events(hass)
|
||||
await hass.async_add_executor_job(setup_hass_services, hass)
|
||||
await hass.async_add_executor_job(setup_abode_events, hass)
|
||||
|
||||
return True
|
||||
|
@ -127,6 +118,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
|||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
hass.services.async_remove(DOMAIN, SERVICE_SETTINGS)
|
||||
hass.services.async_remove(DOMAIN, SERVICE_CAPTURE_IMAGE)
|
||||
hass.services.async_remove(DOMAIN, SERVICE_TRIGGER_AUTOMATION)
|
||||
|
||||
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
||||
await hass.async_add_executor_job(hass.data[DOMAIN].abode.events.stop)
|
||||
|
@ -179,15 +174,15 @@ def setup_hass_services(hass: HomeAssistant) -> None:
|
|||
signal = f"abode_trigger_automation_{entity_id}"
|
||||
dispatcher_send(hass, signal)
|
||||
|
||||
hass.services.async_register(
|
||||
hass.services.register(
|
||||
DOMAIN, SERVICE_SETTINGS, change_setting, schema=CHANGE_SETTING_SCHEMA
|
||||
)
|
||||
|
||||
hass.services.async_register(
|
||||
hass.services.register(
|
||||
DOMAIN, SERVICE_CAPTURE_IMAGE, capture_image, schema=CAPTURE_IMAGE_SCHEMA
|
||||
)
|
||||
|
||||
hass.services.async_register(
|
||||
hass.services.register(
|
||||
DOMAIN, SERVICE_TRIGGER_AUTOMATION, trigger_automation, schema=AUTOMATION_SCHEMA
|
||||
)
|
||||
|
||||
|
@ -251,3 +246,108 @@ def setup_abode_events(hass: HomeAssistant) -> None:
|
|||
hass.data[DOMAIN].abode.events.add_event_callback(
|
||||
event, partial(event_callback, event)
|
||||
)
|
||||
|
||||
|
||||
class AbodeEntity(entity.Entity):
|
||||
"""Representation of an Abode entity."""
|
||||
|
||||
_attr_attribution = ATTRIBUTION
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(self, data: AbodeSystem) -> None:
|
||||
"""Initialize Abode entity."""
|
||||
self._data = data
|
||||
self._attr_should_poll = data.polling
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Subscribe to Abode connection status updates."""
|
||||
await self.hass.async_add_executor_job(
|
||||
self._data.abode.events.add_connection_status_callback,
|
||||
self.unique_id,
|
||||
self._update_connection_status,
|
||||
)
|
||||
|
||||
self.hass.data[DOMAIN].entity_ids.add(self.entity_id)
|
||||
|
||||
async def async_will_remove_from_hass(self) -> None:
|
||||
"""Unsubscribe from Abode connection status updates."""
|
||||
await self.hass.async_add_executor_job(
|
||||
self._data.abode.events.remove_connection_status_callback, self.unique_id
|
||||
)
|
||||
|
||||
def _update_connection_status(self) -> None:
|
||||
"""Update the entity available property."""
|
||||
self._attr_available = self._data.abode.events.connected
|
||||
self.schedule_update_ha_state()
|
||||
|
||||
|
||||
class AbodeDevice(AbodeEntity):
|
||||
"""Representation of an Abode device."""
|
||||
|
||||
def __init__(self, data: AbodeSystem, device: AbodeDev) -> None:
|
||||
"""Initialize Abode device."""
|
||||
super().__init__(data)
|
||||
self._device = device
|
||||
self._attr_unique_id = device.uuid
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Subscribe to device events."""
|
||||
await super().async_added_to_hass()
|
||||
await self.hass.async_add_executor_job(
|
||||
self._data.abode.events.add_device_callback,
|
||||
self._device.id,
|
||||
self._update_callback,
|
||||
)
|
||||
|
||||
async def async_will_remove_from_hass(self) -> None:
|
||||
"""Unsubscribe from device events."""
|
||||
await super().async_will_remove_from_hass()
|
||||
await self.hass.async_add_executor_job(
|
||||
self._data.abode.events.remove_all_device_callbacks, self._device.id
|
||||
)
|
||||
|
||||
def update(self) -> None:
|
||||
"""Update device state."""
|
||||
self._device.refresh()
|
||||
|
||||
@property
|
||||
def extra_state_attributes(self) -> dict[str, str]:
|
||||
"""Return the state attributes."""
|
||||
return {
|
||||
"device_id": self._device.id,
|
||||
"battery_low": self._device.battery_low,
|
||||
"no_response": self._device.no_response,
|
||||
"device_type": self._device.type,
|
||||
}
|
||||
|
||||
@property
|
||||
def device_info(self) -> DeviceInfo:
|
||||
"""Return device registry information for this entity."""
|
||||
return DeviceInfo(
|
||||
identifiers={(DOMAIN, self._device.id)},
|
||||
manufacturer="Abode",
|
||||
model=self._device.type,
|
||||
name=self._device.name,
|
||||
)
|
||||
|
||||
def _update_callback(self, device: AbodeDev) -> None:
|
||||
"""Update the device state."""
|
||||
self.schedule_update_ha_state()
|
||||
|
||||
|
||||
class AbodeAutomation(AbodeEntity):
|
||||
"""Representation of an Abode automation."""
|
||||
|
||||
def __init__(self, data: AbodeSystem, automation: AbodeAuto) -> None:
|
||||
"""Initialize for Abode automation."""
|
||||
super().__init__(data)
|
||||
self._automation = automation
|
||||
self._attr_name = automation.name
|
||||
self._attr_unique_id = automation.automation_id
|
||||
self._attr_extra_state_attributes = {
|
||||
"type": "CUE automation",
|
||||
}
|
||||
|
||||
def update(self) -> None:
|
||||
"""Update automation state."""
|
||||
self._automation.refresh()
|
||||
|
|
|
homeassistant/components/abode/alarm_control_panel.py
@@ -1,21 +1,21 @@
"""Support for Abode Security System alarm control panels."""

from __future__ import annotations

from jaraco.abode.devices.alarm import Alarm
from jaraco.abode.devices.alarm import Alarm as AbodeAl

from homeassistant.components.alarm_control_panel import (
    AlarmControlPanelEntity,
    AlarmControlPanelEntityFeature,
    AlarmControlPanelState,
)
import homeassistant.components.alarm_control_panel as alarm
from homeassistant.components.alarm_control_panel import AlarmControlPanelEntityFeature
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
    STATE_ALARM_ARMED_AWAY,
    STATE_ALARM_ARMED_HOME,
    STATE_ALARM_DISARMED,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from . import AbodeSystem
from . import AbodeDevice, AbodeSystem
from .const import DOMAIN
from .entity import AbodeDevice


async def async_setup_entry(

@@ -28,7 +28,7 @@ async def async_setup_entry(
    )


class AbodeAlarm(AbodeDevice, AlarmControlPanelEntity):
class AbodeAlarm(AbodeDevice, alarm.AlarmControlPanelEntity):
    """An alarm_control_panel implementation for Abode."""

    _attr_name = None

@@ -37,17 +37,17 @@ class AbodeAlarm(AbodeDevice, AlarmControlPanelEntity):
        AlarmControlPanelEntityFeature.ARM_HOME
        | AlarmControlPanelEntityFeature.ARM_AWAY
    )
    _device: Alarm
    _device: AbodeAl

    @property
    def alarm_state(self) -> AlarmControlPanelState | None:
    def state(self) -> str | None:
        """Return the state of the device."""
        if self._device.is_standby:
            return AlarmControlPanelState.DISARMED
            return STATE_ALARM_DISARMED
        if self._device.is_away:
            return AlarmControlPanelState.ARMED_AWAY
            return STATE_ALARM_ARMED_AWAY
        if self._device.is_home:
            return AlarmControlPanelState.ARMED_HOME
            return STATE_ALARM_ARMED_HOME
        return None

    def alarm_disarm(self, code: str | None = None) -> None:
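Aside, not part of this diff: both versions of the state property map three device flags onto one alarm state, first match wins. A standalone sketch of that mapping (requires Python 3.11+ for StrEnum); PanelState is a local stand-in, not Home Assistant's AlarmControlPanelState enum.

from enum import StrEnum  # Python 3.11+

class PanelState(StrEnum):
    # Local stand-in for Home Assistant's AlarmControlPanelState enum.
    DISARMED = "disarmed"
    ARMED_AWAY = "armed_away"
    ARMED_HOME = "armed_home"

def map_state(is_standby: bool, is_away: bool, is_home: bool) -> PanelState | None:
    if is_standby:
        return PanelState.DISARMED
    if is_away:
        return PanelState.ARMED_AWAY
    if is_home:
        return PanelState.ARMED_HOME
    return None

print(map_state(False, True, False))  # PanelState.ARMED_AWAY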
homeassistant/components/abode/binary_sensor.py
@@ -1,10 +1,10 @@
"""Support for Abode Security System binary sensors."""

from __future__ import annotations

from typing import cast

from jaraco.abode.devices.binary_sensor import BinarySensor
from jaraco.abode.devices.sensor import BinarySensor as ABBinarySensor
from jaraco.abode.helpers import constants as CONST

from homeassistant.components.binary_sensor import (
    BinarySensorDeviceClass,

@@ -15,9 +15,8 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.util.enum import try_parse_enum

from . import AbodeSystem
from . import AbodeDevice, AbodeSystem
from .const import DOMAIN
from .entity import AbodeDevice


async def async_setup_entry(

@@ -27,11 +26,11 @@ async def async_setup_entry(
    data: AbodeSystem = hass.data[DOMAIN]

    device_types = [
        "connectivity",
        "moisture",
        "motion",
        "occupancy",
        "door",
        CONST.TYPE_CONNECTIVITY,
        CONST.TYPE_MOISTURE,
        CONST.TYPE_MOTION,
        CONST.TYPE_OCCUPANCY,
        CONST.TYPE_OPENING,
    ]

    async_add_entities(

@@ -44,7 +43,7 @@ class AbodeBinarySensor(AbodeDevice, BinarySensorEntity):
    """A binary sensor implementation for Abode device."""

    _attr_name = None
    _device: BinarySensor
    _device: ABBinarySensor

    @property
    def is_on(self) -> bool:
homeassistant/components/abode/camera.py
@@ -1,13 +1,12 @@
"""Support for Abode Security System cameras."""

from __future__ import annotations

from datetime import timedelta
from typing import Any, cast

from jaraco.abode.devices.base import Device
from jaraco.abode.devices.base import Device as AbodeDev
from jaraco.abode.devices.camera import Camera as AbodeCam
from jaraco.abode.helpers import timeline
from jaraco.abode.helpers import constants as CONST, timeline as TIMELINE
import requests
from requests.models import Response

@@ -18,9 +17,8 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.util import Throttle

from . import AbodeSystem
from . import AbodeDevice, AbodeSystem
from .const import DOMAIN, LOGGER
from .entity import AbodeDevice

MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=90)

@@ -32,8 +30,8 @@ async def async_setup_entry(
    data: AbodeSystem = hass.data[DOMAIN]

    async_add_entities(
        AbodeCamera(data, device, timeline.CAPTURE_IMAGE)
        for device in data.abode.get_devices(generic_type="camera")
        AbodeCamera(data, device, TIMELINE.CAPTURE_IMAGE)
        for device in data.abode.get_devices(generic_type=CONST.TYPE_CAMERA)
    )


@@ -43,7 +41,7 @@ class AbodeCamera(AbodeDevice, Camera):
    _device: AbodeCam
    _attr_name = None

    def __init__(self, data: AbodeSystem, device: Device, event: Event) -> None:
    def __init__(self, data: AbodeSystem, device: AbodeDev, event: Event) -> None:
        """Initialize the Abode device."""
        AbodeDevice.__init__(self, data, device)
        Camera.__init__(self)
@ -1,5 +1,4 @@
|
|||
"""Config flow for the Abode Security System component."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Mapping
|
||||
|
@ -15,15 +14,16 @@ from jaraco.abode.helpers.errors import MFA_CODE_REQUIRED
|
|||
from requests.exceptions import ConnectTimeout, HTTPError
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant import config_entries
|
||||
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
|
||||
from homeassistant.data_entry_flow import FlowResult
|
||||
|
||||
from .const import CONF_POLLING, DOMAIN, LOGGER
|
||||
|
||||
CONF_MFA = "mfa_code"
|
||||
|
||||
|
||||
class AbodeFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
class AbodeFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
|
||||
"""Config flow for Abode."""
|
||||
|
||||
VERSION = 1
|
||||
|
@ -43,7 +43,7 @@ class AbodeFlowHandler(ConfigFlow, domain=DOMAIN):
|
|||
self._polling: bool = False
|
||||
self._username: str | None = None
|
||||
|
||||
async def _async_abode_login(self, step_id: str) -> ConfigFlowResult:
|
||||
async def _async_abode_login(self, step_id: str) -> FlowResult:
|
||||
"""Handle login with Abode."""
|
||||
errors = {}
|
||||
|
||||
|
@ -74,7 +74,7 @@ class AbodeFlowHandler(ConfigFlow, domain=DOMAIN):
|
|||
|
||||
return await self._async_create_entry()
|
||||
|
||||
async def _async_abode_mfa_login(self) -> ConfigFlowResult:
|
||||
async def _async_abode_mfa_login(self) -> FlowResult:
|
||||
"""Handle multi-factor authentication (MFA) login with Abode."""
|
||||
try:
|
||||
# Create instance to access login method for passing MFA code
|
||||
|
@ -92,7 +92,7 @@ class AbodeFlowHandler(ConfigFlow, domain=DOMAIN):
|
|||
|
||||
return await self._async_create_entry()
|
||||
|
||||
async def _async_create_entry(self) -> ConfigFlowResult:
|
||||
async def _async_create_entry(self) -> FlowResult:
|
||||
"""Create the config entry."""
|
||||
config_data = {
|
||||
CONF_USERNAME: self._username,
|
||||
|
@ -102,7 +102,15 @@ class AbodeFlowHandler(ConfigFlow, domain=DOMAIN):
|
|||
existing_entry = await self.async_set_unique_id(self._username)
|
||||
|
||||
if existing_entry:
|
||||
return self.async_update_reload_and_abort(existing_entry, data=config_data)
|
||||
self.hass.config_entries.async_update_entry(
|
||||
existing_entry, data=config_data
|
||||
)
|
||||
# Reload the Abode config entry otherwise devices will remain unavailable
|
||||
self.hass.async_create_task(
|
||||
self.hass.config_entries.async_reload(existing_entry.entry_id)
|
||||
)
|
||||
|
||||
return self.async_abort(reason="reauth_successful")
|
||||
|
||||
return self.async_create_entry(
|
||||
title=cast(str, self._username), data=config_data
|
||||
|
@ -110,7 +118,7 @@ class AbodeFlowHandler(ConfigFlow, domain=DOMAIN):
|
|||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
) -> FlowResult:
|
||||
"""Handle a flow initialized by the user."""
|
||||
if self._async_current_entries():
|
||||
return self.async_abort(reason="single_instance_allowed")
|
||||
|
@ -127,7 +135,7 @@ class AbodeFlowHandler(ConfigFlow, domain=DOMAIN):
|
|||
|
||||
async def async_step_mfa(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
) -> FlowResult:
|
||||
"""Handle a multi-factor authentication (MFA) flow."""
|
||||
if user_input is None:
|
||||
return self.async_show_form(
|
||||
|
@ -138,9 +146,7 @@ class AbodeFlowHandler(ConfigFlow, domain=DOMAIN):
|
|||
|
||||
return await self._async_abode_mfa_login()
|
||||
|
||||
async def async_step_reauth(
|
||||
self, entry_data: Mapping[str, Any]
|
||||
) -> ConfigFlowResult:
|
||||
async def async_step_reauth(self, entry_data: Mapping[str, Any]) -> FlowResult:
|
||||
"""Handle reauthorization request from Abode."""
|
||||
self._username = entry_data[CONF_USERNAME]
|
||||
|
||||
|
@ -148,7 +154,7 @@ class AbodeFlowHandler(ConfigFlow, domain=DOMAIN):
|
|||
|
||||
async def async_step_reauth_confirm(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
) -> FlowResult:
|
||||
"""Handle reauthorization flow."""
|
||||
if user_input is None:
|
||||
return self.async_show_form(
|
||||
|
|
|
homeassistant/components/abode/const.py
@@ -1,5 +1,4 @@
"""Constants for the Abode Security System component."""

import logging

LOGGER = logging.getLogger(__package__)
homeassistant/components/abode/cover.py
@@ -1,17 +1,16 @@
"""Support for Abode Security System covers."""

from typing import Any

from jaraco.abode.devices.cover import Cover
from jaraco.abode.devices.cover import Cover as AbodeCV
from jaraco.abode.helpers import constants as CONST

from homeassistant.components.cover import CoverEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from . import AbodeSystem
from . import AbodeDevice, AbodeSystem
from .const import DOMAIN
from .entity import AbodeDevice


async def async_setup_entry(

@@ -22,14 +21,14 @@ async def async_setup_entry(

    async_add_entities(
        AbodeCover(data, device)
        for device in data.abode.get_devices(generic_type="cover")
        for device in data.abode.get_devices(generic_type=CONST.TYPE_COVER)
    )


class AbodeCover(AbodeDevice, CoverEntity):
    """Representation of an Abode cover."""

    _device: Cover
    _device: AbodeCV
    _attr_name = None

    @property
@ -1,115 +0,0 @@
|
|||
"""Support for Abode Security System entities."""
|
||||
|
||||
from jaraco.abode.automation import Automation as AbodeAuto
|
||||
from jaraco.abode.devices.base import Device as AbodeDev
|
||||
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity import Entity
|
||||
|
||||
from . import AbodeSystem
|
||||
from .const import ATTRIBUTION, DOMAIN
|
||||
|
||||
|
||||
class AbodeEntity(Entity):
|
||||
"""Representation of an Abode entity."""
|
||||
|
||||
_attr_attribution = ATTRIBUTION
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(self, data: AbodeSystem) -> None:
|
||||
"""Initialize Abode entity."""
|
||||
self._data = data
|
||||
self._attr_should_poll = data.polling
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Subscribe to Abode connection status updates."""
|
||||
await self.hass.async_add_executor_job(
|
||||
self._data.abode.events.add_connection_status_callback,
|
||||
self.unique_id,
|
||||
self._update_connection_status,
|
||||
)
|
||||
|
||||
self.hass.data[DOMAIN].entity_ids.add(self.entity_id)
|
||||
|
||||
async def async_will_remove_from_hass(self) -> None:
|
||||
"""Unsubscribe from Abode connection status updates."""
|
||||
await self.hass.async_add_executor_job(
|
||||
self._data.abode.events.remove_connection_status_callback, self.unique_id
|
||||
)
|
||||
|
||||
def _update_connection_status(self) -> None:
|
||||
"""Update the entity available property."""
|
||||
self._attr_available = self._data.abode.events.connected
|
||||
self.schedule_update_ha_state()
|
||||
|
||||
|
||||
class AbodeDevice(AbodeEntity):
|
||||
"""Representation of an Abode device."""
|
||||
|
||||
def __init__(self, data: AbodeSystem, device: AbodeDev) -> None:
|
||||
"""Initialize Abode device."""
|
||||
super().__init__(data)
|
||||
self._device = device
|
||||
self._attr_unique_id = device.uuid
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Subscribe to device events."""
|
||||
await super().async_added_to_hass()
|
||||
await self.hass.async_add_executor_job(
|
||||
self._data.abode.events.add_device_callback,
|
||||
self._device.id,
|
||||
self._update_callback,
|
||||
)
|
||||
|
||||
async def async_will_remove_from_hass(self) -> None:
|
||||
"""Unsubscribe from device events."""
|
||||
await super().async_will_remove_from_hass()
|
||||
await self.hass.async_add_executor_job(
|
||||
self._data.abode.events.remove_all_device_callbacks, self._device.id
|
||||
)
|
||||
|
||||
def update(self) -> None:
|
||||
"""Update device state."""
|
||||
self._device.refresh()
|
||||
|
||||
@property
|
||||
def extra_state_attributes(self) -> dict[str, str]:
|
||||
"""Return the state attributes."""
|
||||
return {
|
||||
"device_id": self._device.id,
|
||||
"battery_low": self._device.battery_low,
|
||||
"no_response": self._device.no_response,
|
||||
"device_type": self._device.type,
|
||||
}
|
||||
|
||||
@property
|
||||
def device_info(self) -> DeviceInfo:
|
||||
"""Return device registry information for this entity."""
|
||||
return DeviceInfo(
|
||||
identifiers={(DOMAIN, self._device.id)},
|
||||
manufacturer="Abode",
|
||||
model=self._device.type,
|
||||
name=self._device.name,
|
||||
)
|
||||
|
||||
def _update_callback(self, device: AbodeDev) -> None:
|
||||
"""Update the device state."""
|
||||
self.schedule_update_ha_state()
|
||||
|
||||
|
||||
class AbodeAutomation(AbodeEntity):
|
||||
"""Representation of an Abode automation."""
|
||||
|
||||
def __init__(self, data: AbodeSystem, automation: AbodeAuto) -> None:
|
||||
"""Initialize for Abode automation."""
|
||||
super().__init__(data)
|
||||
self._automation = automation
|
||||
self._attr_name = automation.name
|
||||
self._attr_unique_id = automation.id
|
||||
self._attr_extra_state_attributes = {
|
||||
"type": "CUE automation",
|
||||
}
|
||||
|
||||
def update(self) -> None:
|
||||
"""Update automation state."""
|
||||
self._automation.refresh()
|
|
homeassistant/components/abode/icons.json
@@ -5,16 +5,5 @@
        "default": "mdi:robot"
      }
    }
  },
  "services": {
    "capture_image": {
      "service": "mdi:camera"
    },
    "change_setting": {
      "service": "mdi:cog"
    },
    "trigger_automation": {
      "service": "mdi:play"
    }
  }
}
homeassistant/components/abode/light.py
@@ -1,11 +1,11 @@
"""Support for Abode Security System lights."""

from __future__ import annotations

from math import ceil
from typing import Any

from jaraco.abode.devices.light import Light
from jaraco.abode.devices.light import Light as AbodeLT
from jaraco.abode.helpers import constants as CONST

from homeassistant.components.light import (
    ATTR_BRIGHTNESS,

@@ -22,9 +22,8 @@ from homeassistant.util.color import (
    color_temperature_mired_to_kelvin,
)

from . import AbodeSystem
from . import AbodeDevice, AbodeSystem
from .const import DOMAIN
from .entity import AbodeDevice


async def async_setup_entry(

@@ -35,14 +34,14 @@ async def async_setup_entry(

    async_add_entities(
        AbodeLight(data, device)
        for device in data.abode.get_devices(generic_type="light")
        for device in data.abode.get_devices(generic_type=CONST.TYPE_LIGHT)
    )


class AbodeLight(AbodeDevice, LightEntity):
    """Representation of an Abode light."""

    _device: Light
    _device: AbodeLT
    _attr_name = None

    def turn_on(self, **kwargs: Any) -> None:
homeassistant/components/abode/lock.py
@@ -1,17 +1,16 @@
"""Support for the Abode Security System locks."""

from typing import Any

from jaraco.abode.devices.lock import Lock
from jaraco.abode.devices.lock import Lock as AbodeLK
from jaraco.abode.helpers import constants as CONST

from homeassistant.components.lock import LockEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from . import AbodeSystem
from . import AbodeDevice, AbodeSystem
from .const import DOMAIN
from .entity import AbodeDevice


async def async_setup_entry(

@@ -22,14 +21,14 @@ async def async_setup_entry(

    async_add_entities(
        AbodeLock(data, device)
        for device in data.abode.get_devices(generic_type="lock")
        for device in data.abode.get_devices(generic_type=CONST.TYPE_LOCK)
    )


class AbodeLock(AbodeDevice, LockEntity):
    """Representation of an Abode lock."""

    _device: Lock
    _device: AbodeLK
    _attr_name = None

    def lock(self, **kwargs: Any) -> None:
homeassistant/components/abode/manifest.json
@@ -9,5 +9,5 @@
  },
  "iot_class": "cloud_push",
  "loggers": ["jaraco.abode", "lomond"],
  "requirements": ["jaraco.abode==6.2.1"]
  "requirements": ["jaraco.abode==3.3.0", "jaraco.functools==3.9.0"]
}
@ -1,12 +1,12 @@
|
|||
"""Support for Abode Security System sensors."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
from typing import cast
|
||||
|
||||
from jaraco.abode.devices.sensor import Sensor
|
||||
from jaraco.abode.devices.sensor import Sensor as AbodeSense
|
||||
from jaraco.abode.helpers import constants as CONST
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
SensorDeviceClass,
|
||||
|
@ -18,27 +18,31 @@ from homeassistant.const import LIGHT_LUX, PERCENTAGE, UnitOfTemperature
|
|||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
|
||||
from . import AbodeSystem
|
||||
from . import AbodeDevice, AbodeSystem
|
||||
from .const import DOMAIN
|
||||
from .entity import AbodeDevice
|
||||
|
||||
ABODE_TEMPERATURE_UNIT_HA_UNIT = {
|
||||
"°F": UnitOfTemperature.FAHRENHEIT,
|
||||
"°C": UnitOfTemperature.CELSIUS,
|
||||
CONST.UNIT_FAHRENHEIT: UnitOfTemperature.FAHRENHEIT,
|
||||
CONST.UNIT_CELSIUS: UnitOfTemperature.CELSIUS,
|
||||
}
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class AbodeSensorDescription(SensorEntityDescription):
|
||||
"""Class describing Abode sensor entities."""
|
||||
@dataclass(frozen=True)
|
||||
class AbodeSensorDescriptionMixin:
|
||||
"""Mixin for Abode sensor."""
|
||||
|
||||
value_fn: Callable[[Sensor], float]
|
||||
native_unit_of_measurement_fn: Callable[[Sensor], str]
|
||||
value_fn: Callable[[AbodeSense], float]
|
||||
native_unit_of_measurement_fn: Callable[[AbodeSense], str]
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class AbodeSensorDescription(SensorEntityDescription, AbodeSensorDescriptionMixin):
|
||||
"""Class describing Abode sensor entities."""
|
||||
|
||||
|
||||
SENSOR_TYPES: tuple[AbodeSensorDescription, ...] = (
|
||||
AbodeSensorDescription(
|
||||
key="temperature",
|
||||
key=CONST.TEMP_STATUS_KEY,
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
native_unit_of_measurement_fn=lambda device: ABODE_TEMPERATURE_UNIT_HA_UNIT[
|
||||
device.temp_unit
|
||||
|
@ -46,13 +50,13 @@ SENSOR_TYPES: tuple[AbodeSensorDescription, ...] = (
|
|||
value_fn=lambda device: cast(float, device.temp),
|
||||
),
|
||||
AbodeSensorDescription(
|
||||
key="humidity",
|
||||
key=CONST.HUMI_STATUS_KEY,
|
||||
device_class=SensorDeviceClass.HUMIDITY,
|
||||
native_unit_of_measurement_fn=lambda _: PERCENTAGE,
|
||||
value_fn=lambda device: cast(float, device.humidity),
|
||||
),
|
||||
AbodeSensorDescription(
|
||||
key="lux",
|
||||
key=CONST.LUX_STATUS_KEY,
|
||||
device_class=SensorDeviceClass.ILLUMINANCE,
|
||||
native_unit_of_measurement_fn=lambda _: LIGHT_LUX,
|
||||
value_fn=lambda device: cast(float, device.lux),
|
||||
|
@ -69,8 +73,8 @@ async def async_setup_entry(
|
|||
async_add_entities(
|
||||
AbodeSensor(data, device, description)
|
||||
for description in SENSOR_TYPES
|
||||
for device in data.abode.get_devices(generic_type="sensor")
|
||||
if description.key in device.get_value("statuses")
|
||||
for device in data.abode.get_devices(generic_type=CONST.TYPE_SENSOR)
|
||||
if description.key in device.get_value(CONST.STATUSES_KEY)
|
||||
)
|
||||
|
||||
|
||||
|
@ -78,12 +82,12 @@ class AbodeSensor(AbodeDevice, SensorEntity):
|
|||
"""A sensor implementation for Abode devices."""
|
||||
|
||||
entity_description: AbodeSensorDescription
|
||||
_device: Sensor
|
||||
_device: AbodeSense
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
data: AbodeSystem,
|
||||
device: Sensor,
|
||||
device: AbodeSense,
|
||||
description: AbodeSensorDescription,
|
||||
) -> None:
|
||||
"""Initialize a sensor for an Abode device."""
|
||||
|
|
|
@ -1,10 +1,10 @@
|
|||
"""Support for Abode Security System switches."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any, cast
|
||||
|
||||
from jaraco.abode.devices.switch import Switch
|
||||
from jaraco.abode.devices.switch import Switch as AbodeSW
|
||||
from jaraco.abode.helpers import constants as CONST
|
||||
|
||||
from homeassistant.components.switch import SwitchEntity
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
|
@ -12,11 +12,10 @@ from homeassistant.core import HomeAssistant
|
|||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
|
||||
from . import AbodeSystem
|
||||
from . import AbodeAutomation, AbodeDevice, AbodeSystem
|
||||
from .const import DOMAIN
|
||||
from .entity import AbodeAutomation, AbodeDevice
|
||||
|
||||
DEVICE_TYPES = ["switch", "valve"]
|
||||
DEVICE_TYPES = [CONST.TYPE_SWITCH, CONST.TYPE_VALVE]
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
|
@ -42,7 +41,7 @@ async def async_setup_entry(
|
|||
class AbodeSwitch(AbodeDevice, SwitchEntity):
|
||||
"""Representation of an Abode switch."""
|
||||
|
||||
_device: Switch
|
||||
_device: AbodeSW
|
||||
_attr_name = None
|
||||
|
||||
def turn_on(self, **kwargs: Any) -> None:
|
||||
|
@ -88,4 +87,4 @@ class AbodeAutomationSwitch(AbodeAutomation, SwitchEntity):
|
|||
@property
|
||||
def is_on(self) -> bool:
|
||||
"""Return True if the automation is enabled."""
|
||||
return bool(self._automation.enabled)
|
||||
return bool(self._automation.is_enabled)
|
||||
|
|
|
@ -1,29 +0,0 @@
|
|||
"""Initialize the Acaia component."""
|
||||
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .coordinator import AcaiaConfigEntry, AcaiaCoordinator
|
||||
|
||||
PLATFORMS = [
|
||||
Platform.BUTTON,
|
||||
]
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: AcaiaConfigEntry) -> bool:
|
||||
"""Set up acaia as config entry."""
|
||||
|
||||
coordinator = AcaiaCoordinator(hass, entry)
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
|
||||
entry.runtime_data = coordinator
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: AcaiaConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
|
@ -1,61 +0,0 @@
|
|||
"""Button entities for Acaia scales."""
|
||||
|
||||
from collections.abc import Callable, Coroutine
|
||||
from dataclasses import dataclass
|
||||
from typing import Any
|
||||
|
||||
from aioacaia.acaiascale import AcaiaScale
|
||||
|
||||
from homeassistant.components.button import ButtonEntity, ButtonEntityDescription
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
|
||||
from .coordinator import AcaiaConfigEntry
|
||||
from .entity import AcaiaEntity
|
||||
|
||||
|
||||
@dataclass(kw_only=True, frozen=True)
|
||||
class AcaiaButtonEntityDescription(ButtonEntityDescription):
|
||||
"""Description for acaia button entities."""
|
||||
|
||||
press_fn: Callable[[AcaiaScale], Coroutine[Any, Any, None]]
|
||||
|
||||
|
||||
BUTTONS: tuple[AcaiaButtonEntityDescription, ...] = (
|
||||
AcaiaButtonEntityDescription(
|
||||
key="tare",
|
||||
translation_key="tare",
|
||||
press_fn=lambda scale: scale.tare(),
|
||||
),
|
||||
AcaiaButtonEntityDescription(
|
||||
key="reset_timer",
|
||||
translation_key="reset_timer",
|
||||
press_fn=lambda scale: scale.reset_timer(),
|
||||
),
|
||||
AcaiaButtonEntityDescription(
|
||||
key="start_stop",
|
||||
translation_key="start_stop",
|
||||
press_fn=lambda scale: scale.start_stop_timer(),
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: AcaiaConfigEntry,
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up button entities and services."""
|
||||
|
||||
coordinator = entry.runtime_data
|
||||
async_add_entities(AcaiaButton(coordinator, description) for description in BUTTONS)
|
||||
|
||||
|
||||
class AcaiaButton(AcaiaEntity, ButtonEntity):
|
||||
"""Representation of an Acaia button."""
|
||||
|
||||
entity_description: AcaiaButtonEntityDescription
|
||||
|
||||
async def async_press(self) -> None:
|
||||
"""Handle the button press."""
|
||||
await self.entity_description.press_fn(self._scale)
|
|
@ -1,149 +0,0 @@
|
|||
"""Config flow for Acaia integration."""
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from aioacaia.exceptions import AcaiaDeviceNotFound, AcaiaError, AcaiaUnknownDevice
|
||||
from aioacaia.helpers import is_new_scale
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.bluetooth import (
|
||||
BluetoothServiceInfoBleak,
|
||||
async_discovered_service_info,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_ADDRESS, CONF_NAME
|
||||
from homeassistant.helpers.device_registry import format_mac
|
||||
from homeassistant.helpers.selector import (
|
||||
SelectOptionDict,
|
||||
SelectSelector,
|
||||
SelectSelectorConfig,
|
||||
SelectSelectorMode,
|
||||
)
|
||||
|
||||
from .const import CONF_IS_NEW_STYLE_SCALE, DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class AcaiaConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for acaia."""
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize the config flow."""
|
||||
self._discovered: dict[str, Any] = {}
|
||||
self._discovered_devices: dict[str, str] = {}
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle a flow initialized by the user."""
|
||||
|
||||
errors: dict[str, str] = {}
|
||||
|
||||
if user_input is not None:
|
||||
mac = format_mac(user_input[CONF_ADDRESS])
|
||||
try:
|
||||
is_new_style_scale = await is_new_scale(mac)
|
||||
except AcaiaDeviceNotFound:
|
||||
errors["base"] = "device_not_found"
|
||||
except AcaiaError:
|
||||
_LOGGER.exception("Error occurred while connecting to the scale")
|
||||
errors["base"] = "unknown"
|
||||
except AcaiaUnknownDevice:
|
||||
return self.async_abort(reason="unsupported_device")
|
||||
else:
|
||||
await self.async_set_unique_id(mac)
|
||||
self._abort_if_unique_id_configured()
|
||||
|
||||
if not errors:
|
||||
return self.async_create_entry(
|
||||
title=self._discovered_devices[user_input[CONF_ADDRESS]],
|
||||
data={
|
||||
CONF_ADDRESS: mac,
|
||||
CONF_IS_NEW_STYLE_SCALE: is_new_style_scale,
|
||||
},
|
||||
)
|
||||
|
||||
for device in async_discovered_service_info(self.hass):
|
||||
self._discovered_devices[device.address] = device.name
|
||||
|
||||
if not self._discovered_devices:
|
||||
return self.async_abort(reason="no_devices_found")
|
||||
|
||||
options = [
|
||||
SelectOptionDict(
|
||||
value=device_mac,
|
||||
label=f"{device_name} ({device_mac})",
|
||||
)
|
||||
for device_mac, device_name in self._discovered_devices.items()
|
||||
]
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_ADDRESS): SelectSelector(
|
||||
SelectSelectorConfig(
|
||||
options=options,
|
||||
mode=SelectSelectorMode.DROPDOWN,
|
||||
)
|
||||
)
|
||||
}
|
||||
),
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_step_bluetooth(
|
||||
self, discovery_info: BluetoothServiceInfoBleak
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle a discovered Bluetooth device."""
|
||||
|
||||
self._discovered[CONF_ADDRESS] = mac = format_mac(discovery_info.address)
|
||||
self._discovered[CONF_NAME] = discovery_info.name
|
||||
|
||||
await self.async_set_unique_id(mac)
|
||||
self._abort_if_unique_id_configured()
|
||||
|
||||
try:
|
||||
self._discovered[CONF_IS_NEW_STYLE_SCALE] = await is_new_scale(
|
||||
discovery_info.address
|
||||
)
|
||||
except AcaiaDeviceNotFound:
|
||||
_LOGGER.debug("Device not found during discovery")
|
||||
return self.async_abort(reason="device_not_found")
|
||||
except AcaiaError:
|
||||
_LOGGER.debug(
|
||||
"Error occurred while connecting to the scale during discovery",
|
||||
exc_info=True,
|
||||
)
|
||||
return self.async_abort(reason="unknown")
|
||||
except AcaiaUnknownDevice:
|
||||
_LOGGER.debug("Unsupported device during discovery")
|
||||
return self.async_abort(reason="unsupported_device")
|
||||
|
||||
return await self.async_step_bluetooth_confirm()
|
||||
|
||||
async def async_step_bluetooth_confirm(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle confirmation of Bluetooth discovery."""
|
||||
|
||||
if user_input is not None:
|
||||
return self.async_create_entry(
|
||||
title=self._discovered[CONF_NAME],
|
||||
data={
|
||||
CONF_ADDRESS: self._discovered[CONF_ADDRESS],
|
||||
CONF_IS_NEW_STYLE_SCALE: self._discovered[CONF_IS_NEW_STYLE_SCALE],
|
||||
},
|
||||
)
|
||||
|
||||
self.context["title_placeholders"] = placeholders = {
|
||||
CONF_NAME: self._discovered[CONF_NAME]
|
||||
}
|
||||
|
||||
self._set_confirm_only()
|
||||
return self.async_show_form(
|
||||
step_id="bluetooth_confirm",
|
||||
description_placeholders=placeholders,
|
||||
)
|
|
@ -1,4 +0,0 @@
|
|||
"""Constants for component."""
|
||||
|
||||
DOMAIN = "acaia"
|
||||
CONF_IS_NEW_STYLE_SCALE = "is_new_style_scale"
|
|
@ -1,86 +0,0 @@
|
|||
"""Coordinator for Acaia integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
from aioacaia.acaiascale import AcaiaScale
|
||||
from aioacaia.exceptions import AcaiaDeviceNotFound, AcaiaError
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_ADDRESS
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
|
||||
|
||||
from .const import CONF_IS_NEW_STYLE_SCALE
|
||||
|
||||
SCAN_INTERVAL = timedelta(seconds=15)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
type AcaiaConfigEntry = ConfigEntry[AcaiaCoordinator]
|
||||
|
||||
|
||||
class AcaiaCoordinator(DataUpdateCoordinator[None]):
|
||||
"""Class to handle fetching data from the scale."""
|
||||
|
||||
config_entry: AcaiaConfigEntry
|
||||
|
||||
def __init__(self, hass: HomeAssistant, entry: AcaiaConfigEntry) -> None:
|
||||
"""Initialize coordinator."""
|
||||
super().__init__(
|
||||
hass,
|
||||
_LOGGER,
|
||||
name="acaia coordinator",
|
||||
update_interval=SCAN_INTERVAL,
|
||||
config_entry=entry,
|
||||
)
|
||||
|
||||
self._scale = AcaiaScale(
|
||||
address_or_ble_device=entry.data[CONF_ADDRESS],
|
||||
name=entry.title,
|
||||
is_new_style_scale=entry.data[CONF_IS_NEW_STYLE_SCALE],
|
||||
notify_callback=self.async_update_listeners,
|
||||
)
|
||||
|
||||
@property
|
||||
def scale(self) -> AcaiaScale:
|
||||
"""Return the scale object."""
|
||||
return self._scale
|
||||
|
||||
async def _async_update_data(self) -> None:
|
||||
"""Fetch data."""
|
||||
|
||||
# scale is already connected, return
|
||||
if self._scale.connected:
|
||||
return
|
||||
|
||||
# scale is not connected, try to connect
|
||||
try:
|
||||
await self._scale.connect(setup_tasks=False)
|
||||
except (AcaiaDeviceNotFound, AcaiaError, TimeoutError) as ex:
|
||||
_LOGGER.debug(
|
||||
"Could not connect to scale: %s, Error: %s",
|
||||
self.config_entry.data[CONF_ADDRESS],
|
||||
ex,
|
||||
)
|
||||
self._scale.device_disconnected_handler(notify=False)
|
||||
return
|
||||
|
||||
# connected, set up background tasks
|
||||
if not self._scale.heartbeat_task or self._scale.heartbeat_task.done():
|
||||
self._scale.heartbeat_task = self.config_entry.async_create_background_task(
|
||||
hass=self.hass,
|
||||
target=self._scale.send_heartbeats(),
|
||||
name="acaia_heartbeat_task",
|
||||
)
|
||||
|
||||
if not self._scale.process_queue_task or self._scale.process_queue_task.done():
|
||||
self._scale.process_queue_task = (
|
||||
self.config_entry.async_create_background_task(
|
||||
hass=self.hass,
|
||||
target=self._scale.process_queue(),
|
||||
name="acaia_process_queue_task",
|
||||
)
|
||||
)
|
|
@ -1,40 +0,0 @@
|
|||
"""Base class for Acaia entities."""
|
||||
|
||||
from dataclasses import dataclass
|
||||
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity import EntityDescription
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import AcaiaCoordinator
|
||||
|
||||
|
||||
@dataclass
|
||||
class AcaiaEntity(CoordinatorEntity[AcaiaCoordinator]):
|
||||
"""Common elements for all entities."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: AcaiaCoordinator,
|
||||
entity_description: EntityDescription,
|
||||
) -> None:
|
||||
"""Initialize the entity."""
|
||||
super().__init__(coordinator)
|
||||
self.entity_description = entity_description
|
||||
self._scale = coordinator.scale
|
||||
self._attr_unique_id = f"{self._scale.mac}_{entity_description.key}"
|
||||
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, self._scale.mac)},
|
||||
manufacturer="Acaia",
|
||||
model=self._scale.model,
|
||||
suggested_area="Kitchen",
|
||||
)
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Returns whether entity is available."""
|
||||
return super().available and self._scale.connected
|
|
@ -1,15 +0,0 @@
|
|||
{
|
||||
"entity": {
|
||||
"button": {
|
||||
"tare": {
|
||||
"default": "mdi:scale-balance"
|
||||
},
|
||||
"reset_timer": {
|
||||
"default": "mdi:timer-refresh"
|
||||
},
|
||||
"start_stop": {
|
||||
"default": "mdi:timer-play"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,29 +0,0 @@
|
|||
{
|
||||
"domain": "acaia",
|
||||
"name": "Acaia",
|
||||
"bluetooth": [
|
||||
{
|
||||
"manufacturer_id": 16962
|
||||
},
|
||||
{
|
||||
"local_name": "ACAIA*"
|
||||
},
|
||||
{
|
||||
"local_name": "PYXIS-*"
|
||||
},
|
||||
{
|
||||
"local_name": "LUNAR-*"
|
||||
},
|
||||
{
|
||||
"local_name": "PROCHBT001"
|
||||
}
|
||||
],
|
||||
"codeowners": ["@zweckj"],
|
||||
"config_flow": true,
|
||||
"dependencies": ["bluetooth_adapters"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/acaia",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["aioacaia"],
|
||||
"requirements": ["aioacaia==0.1.6"]
|
||||
}
|
|
@ -1,38 +0,0 @@
|
|||
{
|
||||
"config": {
|
||||
"flow_title": "{name}",
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]",
|
||||
"unsupported_device": "This device is not supported."
|
||||
},
|
||||
"error": {
|
||||
"device_not_found": "Device could not be found.",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"step": {
|
||||
"bluetooth_confirm": {
|
||||
"description": "[%key:component::bluetooth::config::step::bluetooth_confirm::description%]"
|
||||
},
|
||||
"user": {
|
||||
"description": "[%key:component::bluetooth::config::step::user::description%]",
|
||||
"data": {
|
||||
"address": "[%key:common::config_flow::data::device%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"button": {
|
||||
"tare": {
|
||||
"name": "Tare"
|
||||
},
|
||||
"reset_timer": {
|
||||
"name": "Reset timer"
|
||||
},
|
||||
"start_stop": {
|
||||
"name": "Start/stop timer"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|