Compare commits: dev ... synesthesi

8 commits:

- 898bb56519
- 1a6affc426
- 93cc266b06
- f0c49b3995
- d375bfaefe
- 7fe4a52d59
- a51de1df3c
- 644427ecc7

6404 changed files with 81736 additions and 295660 deletions
@@ -14,7 +14,6 @@ core: &core
base_platforms: &base_platforms
- homeassistant/components/air_quality/**
- homeassistant/components/alarm_control_panel/**
- homeassistant/components/assist_satellite/**
- homeassistant/components/binary_sensor/**
- homeassistant/components/button/**
- homeassistant/components/calendar/**

@@ -62,7 +61,6 @@ components: &components
- homeassistant/components/auth/**
- homeassistant/components/automation/**
- homeassistant/components/backup/**
- homeassistant/components/blueprint/**
- homeassistant/components/bluetooth/**
- homeassistant/components/cloud/**
- homeassistant/components/config/**

@@ -79,7 +77,6 @@ components: &components
- homeassistant/components/group/**
- homeassistant/components/hassio/**
- homeassistant/components/homeassistant/**
- homeassistant/components/homeassistant_hardware/**
- homeassistant/components/http/**
- homeassistant/components/image/**
- homeassistant/components/input_boolean/**

@@ -112,7 +109,6 @@ components: &components
- homeassistant/components/tag/**
- homeassistant/components/template/**
- homeassistant/components/timer/**
- homeassistant/components/trace/**
- homeassistant/components/usb/**
- homeassistant/components/webhook/**
- homeassistant/components/websocket_api/**

@@ -128,12 +124,9 @@ tests: &tests
- tests/*.py
- tests/auth/**
- tests/backports/**
- tests/components/conftest.py
- tests/components/diagnostics/**
- tests/components/history/**
- tests/components/logbook/**
- tests/components/recorder/**
- tests/components/repairs/**
- tests/components/sensor/**
- tests/hassfest/**
- tests/helpers/**
@@ -2,7 +2,7 @@
"name": "Home Assistant Dev",
"context": "..",
"dockerFile": "../Dockerfile.dev",
"postCreateCommand": "git config --global --add safe.directory ${containerWorkspaceFolder} && script/setup",
"postCreateCommand": "script/setup",
"postStartCommand": "script/bootstrap",
"containerEnv": {
"PYTHONASYNCIODEBUG": "1"

@@ -12,12 +12,7 @@
},
// Port 5683 udp is used by Shelly integration
"appPort": ["8123:8123", "5683:5683/udp"],
"runArgs": [
"-e",
"GIT_EDITOR=code --wait",
"--security-opt",
"label=disable"
],
"runArgs": ["-e", "GIT_EDITOR=code --wait"],
"customizations": {
"vscode": {
"extensions": [

@@ -58,13 +53,7 @@
],
"[python]": {
"editor.defaultFormatter": "charliermarsh.ruff"
},
"json.schemas": [
{
"fileMatch": ["homeassistant/components/*/manifest.json"],
"url": "./script/json_schemas/manifest_schema.json"
}
]
}
}
}
}
@@ -7,7 +7,6 @@ docs
# Development
.devcontainer
.vscode
.tool-versions

# Test related files
tests
.github/FUNDING.yml (3)

@@ -1 +1,2 @@
custom: https://www.openhomefoundation.org
custom: https://www.nabucasa.com
github: balloob
.github/workflows/builder.yml (80)

@@ -10,7 +10,7 @@ on:
env:
BUILD_TYPE: core
DEFAULT_PYTHON: "3.13"
DEFAULT_PYTHON: "3.12"
PIP_TIMEOUT: 60
UV_HTTP_TIMEOUT: 60
UV_SYSTEM_PYTHON: "true"

@@ -27,12 +27,12 @@ jobs:
publish: ${{ steps.version.outputs.publish }}
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.2
uses: actions/checkout@v4.1.7
with:
fetch-depth: 0

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.3.0
uses: actions/setup-python@v5.1.1
with:
python-version: ${{ env.DEFAULT_PYTHON }}

@@ -69,7 +69,7 @@ jobs:
run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T -

- name: Upload translations
uses: actions/upload-artifact@v4.4.3
uses: actions/upload-artifact@v4.3.6
with:
name: translations
path: translations.tar.gz

@@ -90,7 +90,7 @@ jobs:
arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.2
uses: actions/checkout@v4.1.7

- name: Download nightly wheels of frontend
if: needs.init.outputs.channel == 'dev'

@@ -116,7 +116,7 @@ jobs:
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
if: needs.init.outputs.channel == 'dev'
uses: actions/setup-python@v5.3.0
uses: actions/setup-python@v5.1.1
with:
python-version: ${{ env.DEFAULT_PYTHON }}

@@ -126,7 +126,7 @@ jobs:
env:
UV_PRERELEASE: allow
run: |
python3 -m pip install "$(grep '^uv' < requirements.txt)"
python3 -m pip install "$(grep '^uv' < requirements_test.txt)"
uv pip install packaging tomli
uv pip install .
python3 script/version_bump.py nightly --set-nightly-version "${{ needs.init.outputs.version }}"

@@ -242,7 +242,7 @@ jobs:
- green
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.2
uses: actions/checkout@v4.1.7

- name: Set build additional args
run: |

@@ -279,7 +279,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.2
uses: actions/checkout@v4.1.7

- name: Initialize git
uses: home-assistant/actions/helpers/git-init@master

@@ -316,15 +316,14 @@ jobs:
packages: write
id-token: write
strategy:
fail-fast: false
matrix:
registry: ["ghcr.io/home-assistant", "docker.io/homeassistant"]
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.2
uses: actions/checkout@v4.1.7

- name: Install Cosign
uses: sigstore/cosign-installer@v3.7.0
uses: sigstore/cosign-installer@v3.6.0
with:
cosign-release: "v2.2.3"

@@ -451,10 +450,10 @@ jobs:
if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.2
uses: actions/checkout@v4.1.7

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.3.0
uses: actions/setup-python@v5.1.1
with:
python-version: ${{ env.DEFAULT_PYTHON }}

@@ -483,56 +482,3 @@ jobs:
export TWINE_PASSWORD="${{ secrets.TWINE_TOKEN }}"

twine upload dist/* --skip-existing

hassfest-image:
name: Build and test hassfest image
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
attestations: write
id-token: write
needs: ["init"]
if: github.repository_owner == 'home-assistant'
env:
HASSFEST_IMAGE_NAME: ghcr.io/home-assistant/hassfest
HASSFEST_IMAGE_TAG: ghcr.io/home-assistant/hassfest:${{ needs.init.outputs.version }}
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

- name: Login to GitHub Container Registry
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}

- name: Build Docker image
uses: docker/build-push-action@4f58ea79222b3b9dc2c8bbdd6debcef730109a75 # v6.9.0
with:
context: . # So action will not pull the repository again
file: ./script/hassfest/docker/Dockerfile
load: true
tags: ${{ env.HASSFEST_IMAGE_TAG }}

- name: Run hassfest against core
run: docker run --rm -v ${{ github.workspace }}/homeassistant:/github/workspace/homeassistant ${{ env.HASSFEST_IMAGE_TAG }} --core-integrations-path=/github/workspace/homeassistant/components

- name: Push Docker image
if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
id: push
uses: docker/build-push-action@4f58ea79222b3b9dc2c8bbdd6debcef730109a75 # v6.9.0
with:
context: . # So action will not pull the repository again
file: ./script/hassfest/docker/Dockerfile
push: true
tags: ${{ env.HASSFEST_IMAGE_TAG }},${{ env.HASSFEST_IMAGE_NAME }}:latest

- name: Generate artifact attestation
if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
uses: actions/attest-build-provenance@ef244123eb79f2f7a7e75d99086184180e6d0018 # v1.4.4
with:
subject-name: ${{ env.HASSFEST_IMAGE_NAME }}
subject-digest: ${{ steps.push.outputs.digest }}
push-to-registry: true
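Note: the hassfest-image job above builds script/hassfest/docker/Dockerfile and then runs the resulting image against the core integrations. A rough local spot-check of that step might look like the following sketch (the hassfest-local tag is illustrative, not part of the workflow):

  # hassfest-local is an illustrative tag; the workflow uses ${HASSFEST_IMAGE_TAG}
  docker build -f script/hassfest/docker/Dockerfile -t hassfest-local .
  docker run --rm -v "$PWD/homeassistant:/github/workspace/homeassistant" \
    hassfest-local --core-integrations-path=/github/workspace/homeassistant/components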
.github/workflows/ci.yaml (221)
@@ -37,12 +37,12 @@ on:
type: boolean

env:
CACHE_VERSION: 11
CACHE_VERSION: 10
UV_CACHE_VERSION: 1
MYPY_CACHE_VERSION: 9
HA_SHORT_VERSION: "2024.12"
MYPY_CACHE_VERSION: 8
HA_SHORT_VERSION: "2024.9"
DEFAULT_PYTHON: "3.12"
ALL_PYTHON_VERSIONS: "['3.12', '3.13']"
ALL_PYTHON_VERSIONS: "['3.12']"
# 10.3 is the oldest supported version
# - 10.3.32 is the version currently shipped with Synology (as of 17 Feb 2022)
# 10.6 is the current long-term-support
@ -93,7 +93,7 @@ jobs:
|
|||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v4.2.2
|
||||
uses: actions/checkout@v4.1.7
|
||||
- name: Generate partial Python venv restore key
|
||||
id: generate_python_cache_key
|
||||
run: |
|
||||
|
@ -231,16 +231,16 @@ jobs:
|
|||
- info
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v4.2.2
|
||||
uses: actions/checkout@v4.1.7
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
id: python
|
||||
uses: actions/setup-python@v5.3.0
|
||||
uses: actions/setup-python@v5.1.1
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
check-latest: true
|
||||
- name: Restore base Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache@v4.1.2
|
||||
uses: actions/cache@v4.0.2
|
||||
with:
|
||||
path: venv
|
||||
key: >-
|
||||
|
@ -252,11 +252,11 @@ jobs:
|
|||
python -m venv venv
|
||||
. venv/bin/activate
|
||||
python --version
|
||||
pip install "$(grep '^uv' < requirements.txt)"
|
||||
pip install "$(grep '^uv' < requirements_test.txt)"
|
||||
uv pip install "$(cat requirements_test.txt | grep pre-commit)"
|
||||
- name: Restore pre-commit environment from cache
|
||||
id: cache-precommit
|
||||
uses: actions/cache@v4.1.2
|
||||
uses: actions/cache@v4.0.2
|
||||
with:
|
||||
path: ${{ env.PRE_COMMIT_CACHE }}
|
||||
lookup-only: true
|
||||
|
@ -277,16 +277,16 @@ jobs:
|
|||
- pre-commit
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v4.2.2
|
||||
uses: actions/checkout@v4.1.7
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
uses: actions/setup-python@v5.3.0
|
||||
uses: actions/setup-python@v5.1.1
|
||||
id: python
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
check-latest: true
|
||||
- name: Restore base Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@v4.1.2
|
||||
uses: actions/cache/restore@v4.0.2
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
|
@ -295,7 +295,7 @@ jobs:
|
|||
needs.info.outputs.pre-commit_cache_key }}
|
||||
- name: Restore pre-commit environment from cache
|
||||
id: cache-precommit
|
||||
uses: actions/cache/restore@v4.1.2
|
||||
uses: actions/cache/restore@v4.0.2
|
||||
with:
|
||||
path: ${{ env.PRE_COMMIT_CACHE }}
|
||||
fail-on-cache-miss: true
|
||||
|
@ -317,16 +317,16 @@ jobs:
|
|||
- pre-commit
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v4.2.2
|
||||
uses: actions/checkout@v4.1.7
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
uses: actions/setup-python@v5.3.0
|
||||
uses: actions/setup-python@v5.1.1
|
||||
id: python
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
check-latest: true
|
||||
- name: Restore base Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@v4.1.2
|
||||
uses: actions/cache/restore@v4.0.2
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
|
@ -335,7 +335,7 @@ jobs:
|
|||
needs.info.outputs.pre-commit_cache_key }}
|
||||
- name: Restore pre-commit environment from cache
|
||||
id: cache-precommit
|
||||
uses: actions/cache/restore@v4.1.2
|
||||
uses: actions/cache/restore@v4.0.2
|
||||
with:
|
||||
path: ${{ env.PRE_COMMIT_CACHE }}
|
||||
fail-on-cache-miss: true
|
||||
|
@ -357,16 +357,16 @@ jobs:
|
|||
- pre-commit
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v4.2.2
|
||||
uses: actions/checkout@v4.1.7
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
uses: actions/setup-python@v5.3.0
|
||||
uses: actions/setup-python@v5.1.1
|
||||
id: python
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
check-latest: true
|
||||
- name: Restore base Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@v4.1.2
|
||||
uses: actions/cache/restore@v4.0.2
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
|
@ -375,7 +375,7 @@ jobs:
|
|||
needs.info.outputs.pre-commit_cache_key }}
|
||||
- name: Restore pre-commit environment from cache
|
||||
id: cache-precommit
|
||||
uses: actions/cache/restore@v4.1.2
|
||||
uses: actions/cache/restore@v4.0.2
|
||||
with:
|
||||
path: ${{ env.PRE_COMMIT_CACHE }}
|
||||
fail-on-cache-miss: true
|
||||
|
@@ -429,32 +429,17 @@ jobs:
. venv/bin/activate
pre-commit run --show-diff-on-failure --hook-stage manual codespell --all-files

lint-hadolint:
name: Check ${{ matrix.file }}
runs-on: ubuntu-24.04
needs:
- info
if: |
github.event.inputs.pylint-only != 'true'
&& github.event.inputs.mypy-only != 'true'
&& github.event.inputs.audit-licenses-only != 'true'
strategy:
fail-fast: false
matrix:
file:
- Dockerfile
- Dockerfile.dev
- script/hassfest/docker/Dockerfile
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Register hadolint problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/hadolint.json"
- name: Check ${{ matrix.file }}
uses: docker://hadolint/hadolint:v2.12.0
- name: Check Dockerfile
uses: docker://hadolint/hadolint:v1.18.2
with:
args: hadolint ${{ matrix.file }}
args: hadolint Dockerfile
- name: Check Dockerfile.dev
uses: docker://hadolint/hadolint:v1.18.2
with:
args: hadolint Dockerfile.dev

base:
name: Prepare dependencies
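Note: both variants of this job run hadolint over the repository Dockerfiles inside the hadolint container image; the newer variant just turns the file list into a matrix. A minimal local equivalent, assuming Docker is available and using the newer pinned version from the hunk above, would be:

  # same check the workflow runs, pointed at a single file via stdin
  docker run --rm -i hadolint/hadolint:v2.12.0 < Dockerfile
  docker run --rm -i hadolint/hadolint:v2.12.0 < Dockerfile.dev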
@ -466,23 +451,23 @@ jobs:
|
|||
python-version: ${{ fromJSON(needs.info.outputs.python_versions) }}
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v4.2.2
|
||||
uses: actions/checkout@v4.1.7
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
id: python
|
||||
uses: actions/setup-python@v5.3.0
|
||||
uses: actions/setup-python@v5.1.1
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
check-latest: true
|
||||
- name: Generate partial uv restore key
|
||||
id: generate-uv-key
|
||||
run: |
|
||||
uv_version=$(cat requirements.txt | grep uv | cut -d '=' -f 3)
|
||||
uv_version=$(cat requirements_test.txt | grep uv | cut -d '=' -f 3)
|
||||
echo "version=${uv_version}" >> $GITHUB_OUTPUT
|
||||
echo "key=uv-${{ env.UV_CACHE_VERSION }}-${uv_version}-${{
|
||||
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
|
||||
- name: Restore base Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache@v4.1.2
|
||||
uses: actions/cache@v4.0.2
|
||||
with:
|
||||
path: venv
|
||||
lookup-only: true
|
||||
|
@ -491,7 +476,7 @@ jobs:
|
|||
needs.info.outputs.python_cache_key }}
|
||||
- name: Restore uv wheel cache
|
||||
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||
uses: actions/cache@v4.1.2
|
||||
uses: actions/cache@v4.0.2
|
||||
with:
|
||||
path: ${{ env.UV_CACHE_DIR }}
|
||||
key: >-
|
||||
|
@ -525,7 +510,7 @@ jobs:
|
|||
python -m venv venv
|
||||
. venv/bin/activate
|
||||
python --version
|
||||
pip install "$(grep '^uv' < requirements.txt)"
|
||||
pip install "$(grep '^uv' < requirements_test.txt)"
|
||||
uv pip install -U "pip>=21.3.1" setuptools wheel
|
||||
uv pip install -r requirements.txt
|
||||
python -m script.gen_requirements_all ci
|
||||
|
@ -550,16 +535,16 @@ jobs:
|
|||
sudo apt-get -y install \
|
||||
libturbojpeg
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v4.2.2
|
||||
uses: actions/checkout@v4.1.7
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
id: python
|
||||
uses: actions/setup-python@v5.3.0
|
||||
uses: actions/setup-python@v5.1.1
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
check-latest: true
|
||||
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@v4.1.2
|
||||
uses: actions/cache/restore@v4.0.2
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
|
@ -583,16 +568,16 @@ jobs:
|
|||
- base
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v4.2.2
|
||||
uses: actions/checkout@v4.1.7
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
id: python
|
||||
uses: actions/setup-python@v5.3.0
|
||||
uses: actions/setup-python@v5.1.1
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
check-latest: true
|
||||
- name: Restore base Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@v4.1.2
|
||||
uses: actions/cache/restore@v4.0.2
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
|
@@ -615,41 +600,37 @@ jobs:
&& github.event.inputs.mypy-only != 'true'
|| github.event.inputs.audit-licenses-only == 'true')
&& needs.info.outputs.requirements == 'true'
strategy:
fail-fast: false
matrix:
python-version: ${{ fromJson(needs.info.outputs.python_versions) }}
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/checkout@v4.1.7
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.3.0
uses: actions/setup-python@v5.1.1
with:
python-version: ${{ matrix.python-version }}
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.1.2
uses: actions/cache/restore@v4.0.2
with:
path: venv
fail-on-cache-miss: true
key: >-
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
needs.info.outputs.python_cache_key }}
- name: Extract license data
- name: Run pip-licenses
run: |
. venv/bin/activate
python -m script.licenses extract --output-file=licenses-${{ matrix.python-version }}.json
pip-licenses --format=json --output-file=licenses.json
- name: Upload licenses
uses: actions/upload-artifact@v4.4.3
uses: actions/upload-artifact@v4.3.6
with:
name: licenses-${{ github.run_number }}-${{ matrix.python-version }}
path: licenses-${{ matrix.python-version }}.json
- name: Check licenses
name: licenses
path: licenses.json
- name: Process licenses
run: |
. venv/bin/activate
python -m script.licenses check licenses-${{ matrix.python-version }}.json
python -m script.licenses

pylint:
name: Check pylint
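Note: the older variant of this job generates licenses.json with the pip-licenses tool and then post-processes it with script.licenses, while the newer variant does both steps through script.licenses extract/check. A rough local run of the older flow, assuming pip-licenses is available in the venv and that script.licenses reads licenses.json from the working directory, would be:

  . venv/bin/activate
  pip-licenses --format=json --output-file=licenses.json   # same invocation as the workflow step above
  python -m script.licenses                                # assumes the script picks up licenses.json from the CWD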
@ -664,16 +645,16 @@ jobs:
|
|||
- base
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v4.2.2
|
||||
uses: actions/checkout@v4.1.7
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
id: python
|
||||
uses: actions/setup-python@v5.3.0
|
||||
uses: actions/setup-python@v5.1.1
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
check-latest: true
|
||||
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@v4.1.2
|
||||
uses: actions/cache/restore@v4.0.2
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
|
@ -711,16 +692,16 @@ jobs:
|
|||
- base
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v4.2.2
|
||||
uses: actions/checkout@v4.1.7
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
id: python
|
||||
uses: actions/setup-python@v5.3.0
|
||||
uses: actions/setup-python@v5.1.1
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
check-latest: true
|
||||
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@v4.1.2
|
||||
uses: actions/cache/restore@v4.0.2
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
|
@ -756,10 +737,10 @@ jobs:
|
|||
- base
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v4.2.2
|
||||
uses: actions/checkout@v4.1.7
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
id: python
|
||||
uses: actions/setup-python@v5.3.0
|
||||
uses: actions/setup-python@v5.1.1
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
check-latest: true
|
||||
|
@ -772,7 +753,7 @@ jobs:
|
|||
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
|
||||
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@v4.1.2
|
||||
uses: actions/cache/restore@v4.0.2
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
|
@ -780,7 +761,7 @@ jobs:
|
|||
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
|
||||
needs.info.outputs.python_cache_key }}
|
||||
- name: Restore mypy cache
|
||||
uses: actions/cache@v4.1.2
|
||||
uses: actions/cache@v4.0.2
|
||||
with:
|
||||
path: .mypy_cache
|
||||
key: >-
|
||||
|
@ -831,16 +812,16 @@ jobs:
|
|||
libturbojpeg \
|
||||
libgammu-dev
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v4.2.2
|
||||
uses: actions/checkout@v4.1.7
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
id: python
|
||||
uses: actions/setup-python@v5.3.0
|
||||
uses: actions/setup-python@v5.1.1
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
check-latest: true
|
||||
- name: Restore base Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@v4.1.2
|
||||
uses: actions/cache/restore@v4.0.2
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
|
@ -852,7 +833,7 @@ jobs:
|
|||
. venv/bin/activate
|
||||
python -m script.split_tests ${{ needs.info.outputs.test_group_count }} tests
|
||||
- name: Upload pytest_buckets
|
||||
uses: actions/upload-artifact@v4.4.3
|
||||
uses: actions/upload-artifact@v4.3.6
|
||||
with:
|
||||
name: pytest_buckets
|
||||
path: pytest_buckets.txt
|
||||
|
@ -895,16 +876,16 @@ jobs:
|
|||
libturbojpeg \
|
||||
libgammu-dev
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v4.2.2
|
||||
uses: actions/checkout@v4.1.7
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
id: python
|
||||
uses: actions/setup-python@v5.3.0
|
||||
uses: actions/setup-python@v5.1.1
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
check-latest: true
|
||||
- name: Restore full Python ${{ matrix.python-version }} virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@v4.1.2
|
||||
uses: actions/cache/restore@v4.0.2
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
|
@ -944,8 +925,7 @@ jobs:
|
|||
-qq \
|
||||
--timeout=9 \
|
||||
--durations=10 \
|
||||
--numprocesses auto \
|
||||
--snapshot-details \
|
||||
-n auto \
|
||||
--dist=loadfile \
|
||||
${cov_params[@]} \
|
||||
-o console_output_style=count \
|
||||
|
@ -954,14 +934,14 @@ jobs:
|
|||
2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
|
||||
- name: Upload pytest output
|
||||
if: success() || failure() && steps.pytest-full.conclusion == 'failure'
|
||||
uses: actions/upload-artifact@v4.4.3
|
||||
uses: actions/upload-artifact@v4.3.6
|
||||
with:
|
||||
name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
|
||||
path: pytest-*.txt
|
||||
overwrite: true
|
||||
- name: Upload coverage artifact
|
||||
if: needs.info.outputs.skip_coverage != 'true'
|
||||
uses: actions/upload-artifact@v4.4.3
|
||||
uses: actions/upload-artifact@v4.3.6
|
||||
with:
|
||||
name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
|
||||
path: coverage.xml
|
||||
|
@ -1016,16 +996,16 @@ jobs:
|
|||
libturbojpeg \
|
||||
libmariadb-dev-compat
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v4.2.2
|
||||
uses: actions/checkout@v4.1.7
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
id: python
|
||||
uses: actions/setup-python@v5.3.0
|
||||
uses: actions/setup-python@v5.1.1
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
check-latest: true
|
||||
- name: Restore full Python ${{ matrix.python-version }} virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@v4.1.2
|
||||
uses: actions/cache/restore@v4.0.2
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
|
@ -1067,8 +1047,7 @@ jobs:
|
|||
python3 -b -X dev -m pytest \
|
||||
-qq \
|
||||
--timeout=20 \
|
||||
--numprocesses 1 \
|
||||
--snapshot-details \
|
||||
-n 1 \
|
||||
${cov_params[@]} \
|
||||
-o console_output_style=count \
|
||||
--durations=10 \
|
||||
|
@ -1081,7 +1060,7 @@ jobs:
|
|||
2>&1 | tee pytest-${{ matrix.python-version }}-${mariadb}.txt
|
||||
- name: Upload pytest output
|
||||
if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
|
||||
uses: actions/upload-artifact@v4.4.3
|
||||
uses: actions/upload-artifact@v4.3.6
|
||||
with:
|
||||
name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
|
||||
steps.pytest-partial.outputs.mariadb }}
|
||||
|
@ -1089,7 +1068,7 @@ jobs:
|
|||
overwrite: true
|
||||
- name: Upload coverage artifact
|
||||
if: needs.info.outputs.skip_coverage != 'true'
|
||||
uses: actions/upload-artifact@v4.4.3
|
||||
uses: actions/upload-artifact@v4.3.6
|
||||
with:
|
||||
name: coverage-${{ matrix.python-version }}-${{
|
||||
steps.pytest-partial.outputs.mariadb }}
|
||||
|
@ -1100,7 +1079,7 @@ jobs:
|
|||
./script/check_dirty
|
||||
|
||||
pytest-postgres:
|
||||
runs-on: ubuntu-24.04
|
||||
runs-on: ubuntu-22.04
|
||||
services:
|
||||
postgres:
|
||||
image: ${{ matrix.postgresql-group }}
|
||||
|
@ -1140,21 +1119,19 @@ jobs:
|
|||
sudo apt-get -y install \
|
||||
bluez \
|
||||
ffmpeg \
|
||||
libturbojpeg
|
||||
sudo /usr/share/postgresql-common/pgdg/apt.postgresql.org.sh -y
|
||||
sudo apt-get -y install \
|
||||
libturbojpeg \
|
||||
postgresql-server-dev-14
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v4.2.2
|
||||
uses: actions/checkout@v4.1.7
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
id: python
|
||||
uses: actions/setup-python@v5.3.0
|
||||
uses: actions/setup-python@v5.1.1
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
check-latest: true
|
||||
- name: Restore full Python ${{ matrix.python-version }} virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@v4.1.2
|
||||
uses: actions/cache/restore@v4.0.2
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
|
@ -1196,8 +1173,7 @@ jobs:
|
|||
python3 -b -X dev -m pytest \
|
||||
-qq \
|
||||
--timeout=9 \
|
||||
--numprocesses 1 \
|
||||
--snapshot-details \
|
||||
-n 1 \
|
||||
${cov_params[@]} \
|
||||
-o console_output_style=count \
|
||||
--durations=0 \
|
||||
|
@ -1211,7 +1187,7 @@ jobs:
|
|||
2>&1 | tee pytest-${{ matrix.python-version }}-${postgresql}.txt
|
||||
- name: Upload pytest output
|
||||
if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
|
||||
uses: actions/upload-artifact@v4.4.3
|
||||
uses: actions/upload-artifact@v4.3.6
|
||||
with:
|
||||
name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
|
||||
steps.pytest-partial.outputs.postgresql }}
|
||||
|
@ -1219,7 +1195,7 @@ jobs:
|
|||
overwrite: true
|
||||
- name: Upload coverage artifact
|
||||
if: needs.info.outputs.skip_coverage != 'true'
|
||||
uses: actions/upload-artifact@v4.4.3
|
||||
uses: actions/upload-artifact@v4.3.6
|
||||
with:
|
||||
name: coverage-${{ matrix.python-version }}-${{
|
||||
steps.pytest-partial.outputs.postgresql }}
|
||||
|
@ -1241,14 +1217,14 @@ jobs:
|
|||
timeout-minutes: 10
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v4.2.2
|
||||
uses: actions/checkout@v4.1.7
|
||||
- name: Download all coverage artifacts
|
||||
uses: actions/download-artifact@v4.1.8
|
||||
with:
|
||||
pattern: coverage-*
|
||||
- name: Upload coverage to Codecov
|
||||
if: needs.info.outputs.test_full_suite == 'true'
|
||||
uses: codecov/codecov-action@v4.6.0
|
||||
uses: codecov/codecov-action@v4.5.0
|
||||
with:
|
||||
fail_ci_if_error: true
|
||||
flags: full-suite
|
||||
|
@ -1292,16 +1268,16 @@ jobs:
|
|||
libturbojpeg \
|
||||
libgammu-dev
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v4.2.2
|
||||
uses: actions/checkout@v4.1.7
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
id: python
|
||||
uses: actions/setup-python@v5.3.0
|
||||
uses: actions/setup-python@v5.1.1
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
check-latest: true
|
||||
- name: Restore full Python ${{ matrix.python-version }} virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@v4.1.2
|
||||
uses: actions/cache/restore@v4.0.2
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
|
@ -1343,8 +1319,7 @@ jobs:
|
|||
python3 -b -X dev -m pytest \
|
||||
-qq \
|
||||
--timeout=9 \
|
||||
--numprocesses auto \
|
||||
--snapshot-details \
|
||||
-n auto \
|
||||
${cov_params[@]} \
|
||||
-o console_output_style=count \
|
||||
--durations=0 \
|
||||
|
@ -1354,14 +1329,14 @@ jobs:
|
|||
2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
|
||||
- name: Upload pytest output
|
||||
if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
|
||||
uses: actions/upload-artifact@v4.4.3
|
||||
uses: actions/upload-artifact@v4.3.6
|
||||
with:
|
||||
name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
|
||||
path: pytest-*.txt
|
||||
overwrite: true
|
||||
- name: Upload coverage artifact
|
||||
if: needs.info.outputs.skip_coverage != 'true'
|
||||
uses: actions/upload-artifact@v4.4.3
|
||||
uses: actions/upload-artifact@v4.3.6
|
||||
with:
|
||||
name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
|
||||
path: coverage.xml
|
||||
|
@ -1380,14 +1355,14 @@ jobs:
|
|||
timeout-minutes: 10
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v4.2.2
|
||||
uses: actions/checkout@v4.1.7
|
||||
- name: Download all coverage artifacts
|
||||
uses: actions/download-artifact@v4.1.8
|
||||
with:
|
||||
pattern: coverage-*
|
||||
- name: Upload coverage to Codecov
|
||||
if: needs.info.outputs.test_full_suite == 'false'
|
||||
uses: codecov/codecov-action@v4.6.0
|
||||
uses: codecov/codecov-action@v4.5.0
|
||||
with:
|
||||
fail_ci_if_error: true
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
|
|
.github/workflows/codeql.yml (6)

@@ -21,14 +21,14 @@ jobs:
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v4.1.7

- name: Initialize CodeQL
uses: github/codeql-action/init@v3.27.3
uses: github/codeql-action/init@v3.26.4
with:
languages: python

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3.27.3
uses: github/codeql-action/analyze@v3.26.4
with:
category: "/language:python"
|
.github/workflows/translations.yml (4)

@@ -19,10 +19,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.2
uses: actions/checkout@v4.1.7

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.3.0
uses: actions/setup-python@v5.1.1
with:
python-version: ${{ env.DEFAULT_PYTHON }}
||||
|
|
.github/workflows/wheels.yml (83)
@ -32,11 +32,11 @@ jobs:
|
|||
architectures: ${{ steps.info.outputs.architectures }}
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@v4.2.2
|
||||
uses: actions/checkout@v4.1.7
|
||||
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
id: python
|
||||
uses: actions/setup-python@v5.3.0
|
||||
uses: actions/setup-python@v5.1.1
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
check-latest: true
|
||||
|
@ -46,7 +46,7 @@ jobs:
|
|||
python -m venv venv
|
||||
. venv/bin/activate
|
||||
python --version
|
||||
pip install "$(grep '^uv' < requirements.txt)"
|
||||
pip install "$(grep '^uv' < requirements_test.txt)"
|
||||
uv pip install -r requirements.txt
|
||||
|
||||
- name: Get information
|
||||
|
@ -64,8 +64,11 @@ jobs:
|
|||
- name: Write env-file
|
||||
run: |
|
||||
(
|
||||
echo "GRPC_BUILD_WITH_BORING_SSL_ASM=false"
|
||||
echo "GRPC_PYTHON_BUILD_SYSTEM_OPENSSL=true"
|
||||
echo "GRPC_PYTHON_BUILD_WITH_CYTHON=true"
|
||||
echo "GRPC_PYTHON_DISABLE_LIBC_COMPATIBILITY=true"
|
||||
echo "GRPC_PYTHON_LDFLAGS=-lpthread -Wl,-wrap,memcpy -static-libgcc"
|
||||
|
||||
# Fix out of memory issues with rust
|
||||
echo "CARGO_NET_GIT_FETCH_WITH_CLI=true"
|
||||
|
@ -79,15 +82,14 @@ jobs:
|
|||
) > .env_file
|
||||
|
||||
- name: Upload env_file
|
||||
uses: actions/upload-artifact@v4.4.3
|
||||
uses: actions/upload-artifact@v4.3.6
|
||||
with:
|
||||
name: env_file
|
||||
path: ./.env_file
|
||||
include-hidden-files: true
|
||||
overwrite: true
|
||||
|
||||
- name: Upload requirements_diff
|
||||
uses: actions/upload-artifact@v4.4.3
|
||||
uses: actions/upload-artifact@v4.3.6
|
||||
with:
|
||||
name: requirements_diff
|
||||
path: ./requirements_diff.txt
|
||||
|
@ -99,7 +101,7 @@ jobs:
|
|||
python -m script.gen_requirements_all ci
|
||||
|
||||
- name: Upload requirements_all_wheels
|
||||
uses: actions/upload-artifact@v4.4.3
|
||||
uses: actions/upload-artifact@v4.3.6
|
||||
with:
|
||||
name: requirements_all_wheels
|
||||
path: ./requirements_all_wheels_*.txt
|
||||
|
@ -112,11 +114,11 @@ jobs:
|
|||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
abi: ["cp312", "cp313"]
|
||||
abi: ["cp312"]
|
||||
arch: ${{ fromJson(needs.init.outputs.architectures) }}
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@v4.2.2
|
||||
uses: actions/checkout@v4.1.7
|
||||
|
||||
- name: Download env_file
|
||||
uses: actions/download-artifact@v4.1.8
|
||||
|
@ -128,22 +130,16 @@ jobs:
|
|||
with:
|
||||
name: requirements_diff
|
||||
|
||||
- name: Adjust build env
|
||||
run: |
|
||||
# Don't build wheels for uv as uv requires a greater version of rust as currently available on alpine
|
||||
sed -i "/uv/d" requirements.txt
|
||||
sed -i "/uv/d" requirements_diff.txt
|
||||
|
||||
- name: Build wheels
|
||||
uses: home-assistant/wheels@2024.11.0
|
||||
uses: home-assistant/wheels@2024.07.1
|
||||
with:
|
||||
abi: ${{ matrix.abi }}
|
||||
tag: musllinux_1_2
|
||||
arch: ${{ matrix.arch }}
|
||||
wheels-key: ${{ secrets.WHEELS_KEY }}
|
||||
env-file: true
|
||||
apk: "libffi-dev;openssl-dev;yaml-dev;nasm;zlib-dev"
|
||||
skip-binary: aiohttp;multidict;yarl
|
||||
apk: "libffi-dev;openssl-dev;yaml-dev;nasm"
|
||||
skip-binary: aiohttp
|
||||
constraints: "homeassistant/package_constraints.txt"
|
||||
requirements-diff: "requirements_diff.txt"
|
||||
requirements: "requirements.txt"
|
||||
|
@ -156,11 +152,11 @@ jobs:
|
|||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
abi: ["cp312", "cp313"]
|
||||
abi: ["cp312"]
|
||||
arch: ${{ fromJson(needs.init.outputs.architectures) }}
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@v4.2.2
|
||||
uses: actions/checkout@v4.1.7
|
||||
|
||||
- name: Download env_file
|
||||
uses: actions/download-artifact@v4.1.8
|
||||
|
@ -177,18 +173,6 @@ jobs:
|
|||
with:
|
||||
name: requirements_all_wheels
|
||||
|
||||
- name: Adjust build env
|
||||
run: |
|
||||
if [ "${{ matrix.arch }}" = "i386" ]; then
|
||||
echo "NPY_DISABLE_SVML=1" >> .env_file
|
||||
fi
|
||||
|
||||
# Do not pin numpy in wheels building
|
||||
sed -i "/numpy/d" homeassistant/package_constraints.txt
|
||||
# Don't build wheels for uv as uv requires a greater version of rust as currently available on alpine
|
||||
sed -i "/uv/d" requirements.txt
|
||||
sed -i "/uv/d" requirements_diff.txt
|
||||
|
||||
- name: Split requirements all
|
||||
run: |
|
||||
# We split requirements all into multiple files.
|
||||
|
@@ -198,19 +182,28 @@
split -l $(expr $(expr $(cat requirements_all.txt | wc -l) + 1) / 3) requirements_all_wheels_${{ matrix.arch }}.txt requirements_all.txt

- name: Create requirements for cython<3
if: matrix.abi == 'cp312'
run: |
# Some dependencies still require 'cython<3'
# and don't yet use isolated build environments.
# Build these first.
# grpcio: https://github.com/grpc/grpc/issues/33918
# pydantic: https://github.com/pydantic/pydantic/issues/7689

touch requirements_old-cython.txt
cat homeassistant/package_constraints.txt | grep 'grpcio==' >> requirements_old-cython.txt
cat homeassistant/package_constraints.txt | grep 'pydantic==' >> requirements_old-cython.txt

- name: Adjust build env
run: |
if [ "${{ matrix.arch }}" = "i386" ]; then
echo "NPY_DISABLE_SVML=1" >> .env_file
fi

# Do not pin numpy in wheels building
sed -i "/numpy/d" homeassistant/package_constraints.txt

- name: Build wheels (old cython)
uses: home-assistant/wheels@2024.11.0
if: matrix.abi == 'cp312'
uses: home-assistant/wheels@2024.07.1
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2
@ -218,50 +211,50 @@ jobs:
|
|||
wheels-key: ${{ secrets.WHEELS_KEY }}
|
||||
env-file: true
|
||||
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev"
|
||||
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pydantic;pymicro-vad;yarl
|
||||
skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic;pymicro-vad
|
||||
constraints: "homeassistant/package_constraints.txt"
|
||||
requirements-diff: "requirements_diff.txt"
|
||||
requirements: "requirements_old-cython.txt"
|
||||
pip: "'cython<3'"
|
||||
|
||||
- name: Build wheels (part 1)
|
||||
uses: home-assistant/wheels@2024.11.0
|
||||
uses: home-assistant/wheels@2024.07.1
|
||||
with:
|
||||
abi: ${{ matrix.abi }}
|
||||
tag: musllinux_1_2
|
||||
arch: ${{ matrix.arch }}
|
||||
wheels-key: ${{ secrets.WHEELS_KEY }}
|
||||
env-file: true
|
||||
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev"
|
||||
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
|
||||
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm"
|
||||
skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic;pymicro-vad
|
||||
constraints: "homeassistant/package_constraints.txt"
|
||||
requirements-diff: "requirements_diff.txt"
|
||||
requirements: "requirements_all.txtaa"
|
||||
|
||||
- name: Build wheels (part 2)
|
||||
uses: home-assistant/wheels@2024.11.0
|
||||
uses: home-assistant/wheels@2024.07.1
|
||||
with:
|
||||
abi: ${{ matrix.abi }}
|
||||
tag: musllinux_1_2
|
||||
arch: ${{ matrix.arch }}
|
||||
wheels-key: ${{ secrets.WHEELS_KEY }}
|
||||
env-file: true
|
||||
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev"
|
||||
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
|
||||
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm"
|
||||
skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic;pymicro-vad
|
||||
constraints: "homeassistant/package_constraints.txt"
|
||||
requirements-diff: "requirements_diff.txt"
|
||||
requirements: "requirements_all.txtab"
|
||||
|
||||
- name: Build wheels (part 3)
|
||||
uses: home-assistant/wheels@2024.11.0
|
||||
uses: home-assistant/wheels@2024.07.1
|
||||
with:
|
||||
abi: ${{ matrix.abi }}
|
||||
tag: musllinux_1_2
|
||||
arch: ${{ matrix.arch }}
|
||||
wheels-key: ${{ secrets.WHEELS_KEY }}
|
||||
env-file: true
|
||||
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev"
|
||||
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
|
||||
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm"
|
||||
skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic;pymicro-vad
|
||||
constraints: "homeassistant/package_constraints.txt"
|
||||
requirements-diff: "requirements_diff.txt"
|
||||
requirements: "requirements_all.txtac"
|
||||
|
|
.gitignore (1)

@@ -79,7 +79,6 @@ pytest-*.txt
.pydevproject

.python-version
.tool-versions

# emacs auto backups
*~
@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.7.3
rev: v0.6.2
hooks:
- id: ruff
args:

@@ -83,14 +83,14 @@ repos:
pass_filenames: false
language: script
types: [text]
files: ^(homeassistant/.+/(icons|manifest|strings)\.json|homeassistant/brands/.*\.json|homeassistant/.+/services\.yaml|script/hassfest/(?!metadata|mypy_config).+\.py|requirements.+\.txt)$
files: ^(homeassistant/.+/(icons|manifest|strings)\.json|homeassistant/brands/.*\.json|homeassistant/.+/services\.yaml|script/hassfest/(?!metadata|mypy_config).+\.py|requirements_test.txt)$
- id: hassfest-metadata
name: hassfest-metadata
entry: script/run-in-env.sh python3 -m script.hassfest -p metadata,docker
entry: script/run-in-env.sh python3 -m script.hassfest -p metadata
pass_filenames: false
language: script
types: [text]
files: ^(script/hassfest/metadata\.py|homeassistant/const\.py$|pyproject\.toml|homeassistant/components/go2rtc/const\.py)$
files: ^(script/hassfest/metadata\.py|homeassistant/const\.py$|pyproject\.toml)$
- id: hassfest-mypy-config
name: hassfest-mypy-config
entry: script/run-in-env.sh python3 -m script.hassfest -p mypy_config
|
@ -95,7 +95,6 @@ homeassistant.components.aruba.*
|
|||
homeassistant.components.arwn.*
|
||||
homeassistant.components.aseko_pool_live.*
|
||||
homeassistant.components.assist_pipeline.*
|
||||
homeassistant.components.assist_satellite.*
|
||||
homeassistant.components.asuswrt.*
|
||||
homeassistant.components.autarco.*
|
||||
homeassistant.components.auth.*
|
||||
|
@ -111,7 +110,6 @@ homeassistant.components.bitcoin.*
|
|||
homeassistant.components.blockchain.*
|
||||
homeassistant.components.blue_current.*
|
||||
homeassistant.components.blueprint.*
|
||||
homeassistant.components.bluesound.*
|
||||
homeassistant.components.bluetooth.*
|
||||
homeassistant.components.bluetooth_adapters.*
|
||||
homeassistant.components.bluetooth_tracker.*
|
||||
|
@ -124,7 +122,6 @@ homeassistant.components.bryant_evolution.*
|
|||
homeassistant.components.bthome.*
|
||||
homeassistant.components.button.*
|
||||
homeassistant.components.calendar.*
|
||||
homeassistant.components.cambridge_audio.*
|
||||
homeassistant.components.camera.*
|
||||
homeassistant.components.canary.*
|
||||
homeassistant.components.cert_expiry.*
|
||||
|
@ -142,7 +139,6 @@ homeassistant.components.cpuspeed.*
|
|||
homeassistant.components.crownstone.*
|
||||
homeassistant.components.date.*
|
||||
homeassistant.components.datetime.*
|
||||
homeassistant.components.deako.*
|
||||
homeassistant.components.deconz.*
|
||||
homeassistant.components.default_config.*
|
||||
homeassistant.components.demo.*
|
||||
|
@ -209,14 +205,10 @@ homeassistant.components.geo_location.*
|
|||
homeassistant.components.geocaching.*
|
||||
homeassistant.components.gios.*
|
||||
homeassistant.components.glances.*
|
||||
homeassistant.components.go2rtc.*
|
||||
homeassistant.components.goalzero.*
|
||||
homeassistant.components.google.*
|
||||
homeassistant.components.google_assistant_sdk.*
|
||||
homeassistant.components.google_cloud.*
|
||||
homeassistant.components.google_photos.*
|
||||
homeassistant.components.google_sheets.*
|
||||
homeassistant.components.govee_ble.*
|
||||
homeassistant.components.gpsd.*
|
||||
homeassistant.components.greeneye_monitor.*
|
||||
homeassistant.components.group.*
|
||||
|
@ -286,7 +278,6 @@ homeassistant.components.lawn_mower.*
|
|||
homeassistant.components.lcn.*
|
||||
homeassistant.components.ld2410_ble.*
|
||||
homeassistant.components.led_ble.*
|
||||
homeassistant.components.lektrico.*
|
||||
homeassistant.components.lidarr.*
|
||||
homeassistant.components.lifx.*
|
||||
homeassistant.components.light.*
|
||||
|
@ -303,7 +294,9 @@ homeassistant.components.london_underground.*
|
|||
homeassistant.components.lookin.*
|
||||
homeassistant.components.luftdaten.*
|
||||
homeassistant.components.madvr.*
|
||||
homeassistant.components.mailbox.*
|
||||
homeassistant.components.manual.*
|
||||
homeassistant.components.map.*
|
||||
homeassistant.components.mastodon.*
|
||||
homeassistant.components.matrix.*
|
||||
homeassistant.components.matter.*
|
||||
|
@ -318,19 +311,16 @@ homeassistant.components.minecraft_server.*
|
|||
homeassistant.components.mjpeg.*
|
||||
homeassistant.components.modbus.*
|
||||
homeassistant.components.modem_callerid.*
|
||||
homeassistant.components.mold_indicator.*
|
||||
homeassistant.components.monzo.*
|
||||
homeassistant.components.moon.*
|
||||
homeassistant.components.mopeka.*
|
||||
homeassistant.components.motionmount.*
|
||||
homeassistant.components.mqtt.*
|
||||
homeassistant.components.music_assistant.*
|
||||
homeassistant.components.my.*
|
||||
homeassistant.components.mysensors.*
|
||||
homeassistant.components.myuplink.*
|
||||
homeassistant.components.nam.*
|
||||
homeassistant.components.nanoleaf.*
|
||||
homeassistant.components.nasweb.*
|
||||
homeassistant.components.neato.*
|
||||
homeassistant.components.nest.*
|
||||
homeassistant.components.netatmo.*
|
||||
|
@ -340,7 +330,6 @@ homeassistant.components.nfandroidtv.*
|
|||
homeassistant.components.nightscout.*
|
||||
homeassistant.components.nissan_leaf.*
|
||||
homeassistant.components.no_ip.*
|
||||
homeassistant.components.nordpool.*
|
||||
homeassistant.components.notify.*
|
||||
homeassistant.components.notion.*
|
||||
homeassistant.components.number.*
|
||||
|
@ -348,9 +337,7 @@ homeassistant.components.nut.*
|
|||
homeassistant.components.onboarding.*
|
||||
homeassistant.components.oncue.*
|
||||
homeassistant.components.onewire.*
|
||||
homeassistant.components.onkyo.*
|
||||
homeassistant.components.open_meteo.*
|
||||
homeassistant.components.openai_conversation.*
|
||||
homeassistant.components.openexchangerates.*
|
||||
homeassistant.components.opensky.*
|
||||
homeassistant.components.openuv.*
|
||||
|
@ -358,7 +345,6 @@ homeassistant.components.oralb.*
|
|||
homeassistant.components.otbr.*
|
||||
homeassistant.components.overkiz.*
|
||||
homeassistant.components.p1_monitor.*
|
||||
homeassistant.components.panel_custom.*
|
||||
homeassistant.components.peco.*
|
||||
homeassistant.components.persistent_notification.*
|
||||
homeassistant.components.pi_hole.*
|
||||
|
@ -376,7 +362,6 @@ homeassistant.components.pvoutput.*
|
|||
homeassistant.components.qnap_qsw.*
|
||||
homeassistant.components.rabbitair.*
|
||||
homeassistant.components.radarr.*
|
||||
homeassistant.components.radio_browser.*
|
||||
homeassistant.components.rainforest_raven.*
|
||||
homeassistant.components.rainmachine.*
|
||||
homeassistant.components.raspberry_pi.*
|
||||
|
@ -411,10 +396,8 @@ homeassistant.components.select.*
|
|||
homeassistant.components.sensibo.*
|
||||
homeassistant.components.sensirion_ble.*
|
||||
homeassistant.components.sensor.*
|
||||
homeassistant.components.sensoterra.*
|
||||
homeassistant.components.senz.*
|
||||
homeassistant.components.sfr_box.*
|
||||
homeassistant.components.shell_command.*
|
||||
homeassistant.components.shelly.*
|
||||
homeassistant.components.shopping_list.*
|
||||
homeassistant.components.simplepush.*
|
||||
|
@ -424,14 +407,10 @@ homeassistant.components.skybell.*
|
|||
homeassistant.components.slack.*
|
||||
homeassistant.components.sleepiq.*
|
||||
homeassistant.components.smhi.*
|
||||
homeassistant.components.smlight.*
|
||||
homeassistant.components.snooz.*
|
||||
homeassistant.components.solarlog.*
|
||||
homeassistant.components.sonarr.*
|
||||
homeassistant.components.speedtestdotnet.*
|
||||
homeassistant.components.spotify.*
|
||||
homeassistant.components.sql.*
|
||||
homeassistant.components.squeezebox.*
|
||||
homeassistant.components.ssdp.*
|
||||
homeassistant.components.starlink.*
|
||||
homeassistant.components.statistics.*
|
||||
|
@ -444,7 +423,6 @@ homeassistant.components.suez_water.*
|
|||
homeassistant.components.sun.*
|
||||
homeassistant.components.surepetcare.*
|
||||
homeassistant.components.switch.*
|
||||
homeassistant.components.switch_as_x.*
|
||||
homeassistant.components.switchbee.*
|
||||
homeassistant.components.switchbot_cloud.*
|
||||
homeassistant.components.switcher_kis.*
|
||||
|
@ -492,7 +470,6 @@ homeassistant.components.update.*
|
|||
homeassistant.components.uptime.*
|
||||
homeassistant.components.uptimerobot.*
|
||||
homeassistant.components.usb.*
|
||||
homeassistant.components.uvc.*
|
||||
homeassistant.components.vacuum.*
|
||||
homeassistant.components.vallox.*
|
||||
homeassistant.components.valve.*
|
||||
|
@ -513,7 +490,6 @@ homeassistant.components.whois.*
|
|||
homeassistant.components.withings.*
|
||||
homeassistant.components.wiz.*
|
||||
homeassistant.components.wled.*
|
||||
homeassistant.components.workday.*
|
||||
homeassistant.components.worldclock.*
|
||||
homeassistant.components.xiaomi_ble.*
|
||||
homeassistant.components.yale_smart_alarm.*
|
||||
|
|
.vscode/settings.default.json (10)

@@ -6,13 +6,5 @@
// https://code.visualstudio.com/docs/python/testing#_pytest-configuration-settings
"python.testing.pytestEnabled": false,
// https://code.visualstudio.com/docs/python/linting#_general-settings
"pylint.importStrategy": "fromEnvironment",
"json.schemas": [
{
"fileMatch": [
"homeassistant/components/*/manifest.json"
],
"url": "./script/json_schemas/manifest_schema.json"
}
]
"pylint.importStrategy": "fromEnvironment"
}
|
112 CODEOWNERS
@@ -40,8 +40,6 @@ build.json @home-assistant/supervisor
# Integrations
/homeassistant/components/abode/ @shred86
/tests/components/abode/ @shred86
/homeassistant/components/acaia/ @zweckj
/tests/components/acaia/ @zweckj
/homeassistant/components/accuweather/ @bieniu
/tests/components/accuweather/ @bieniu
/homeassistant/components/acmeda/ @atmurray
@@ -50,7 +48,6 @@ build.json @home-assistant/supervisor
/tests/components/adax/ @danielhiversen
/homeassistant/components/adguard/ @frenck
/tests/components/adguard/ @frenck
/homeassistant/components/ads/ @mrpasztoradam
/homeassistant/components/advantage_air/ @Bre77
/tests/components/advantage_air/ @Bre77
/homeassistant/components/aemet/ @Noltari
@@ -146,8 +143,8 @@ build.json @home-assistant/supervisor
/tests/components/aseko_pool_live/ @milanmeu
/homeassistant/components/assist_pipeline/ @balloob @synesthesiam
/tests/components/assist_pipeline/ @balloob @synesthesiam
/homeassistant/components/assist_satellite/ @home-assistant/core @synesthesiam
/tests/components/assist_satellite/ @home-assistant/core @synesthesiam
/homeassistant/components/assist_satellite/ @synesthesiam
/tests/components/assist_satellite/ @synesthesiam
/homeassistant/components/asuswrt/ @kennedyshead @ollo69
/tests/components/asuswrt/ @kennedyshead @ollo69
/homeassistant/components/atag/ @MatsNL
@@ -233,16 +230,14 @@ build.json @home-assistant/supervisor
/homeassistant/components/bsblan/ @liudger
/tests/components/bsblan/ @liudger
/homeassistant/components/bt_smarthub/ @typhoon2099
/homeassistant/components/bthome/ @Ernst79 @thecode
/tests/components/bthome/ @Ernst79 @thecode
/homeassistant/components/bthome/ @Ernst79
/tests/components/bthome/ @Ernst79
/homeassistant/components/buienradar/ @mjj4791 @ties @Robbie1221
/tests/components/buienradar/ @mjj4791 @ties @Robbie1221
/homeassistant/components/button/ @home-assistant/core
/tests/components/button/ @home-assistant/core
/homeassistant/components/calendar/ @home-assistant/core
/tests/components/calendar/ @home-assistant/core
/homeassistant/components/cambridge_audio/ @noahhusby
/tests/components/cambridge_audio/ @noahhusby
/homeassistant/components/camera/ @home-assistant/core
/tests/components/camera/ @home-assistant/core
/homeassistant/components/cast/ @emontnemery
@@ -301,8 +296,6 @@ build.json @home-assistant/supervisor
/tests/components/date/ @home-assistant/core
/homeassistant/components/datetime/ @home-assistant/core
/tests/components/datetime/ @home-assistant/core
/homeassistant/components/deako/ @sebirdman @balake @deakolights
/tests/components/deako/ @sebirdman @balake @deakolights
/homeassistant/components/debugpy/ @frenck
/tests/components/debugpy/ @frenck
/homeassistant/components/deconz/ @Kane610
@@ -362,8 +355,6 @@ build.json @home-assistant/supervisor
/tests/components/dsmr/ @Robbie1221
/homeassistant/components/dsmr_reader/ @sorted-bits @glodenox @erwindouna
/tests/components/dsmr_reader/ @sorted-bits @glodenox @erwindouna
/homeassistant/components/duke_energy/ @hunterjm
/tests/components/duke_energy/ @hunterjm
/homeassistant/components/duotecno/ @cereal2nd
/tests/components/duotecno/ @cereal2nd
/homeassistant/components/dwd_weather_warnings/ @runningman84 @stephan192 @andarotajo
@@ -498,8 +489,8 @@ build.json @home-assistant/supervisor
/tests/components/freebox/ @hacf-fr @Quentame
/homeassistant/components/freedompro/ @stefano055415
/tests/components/freedompro/ @stefano055415
/homeassistant/components/fritz/ @AaronDavidSchneider @chemelli74 @mib1185
/tests/components/fritz/ @AaronDavidSchneider @chemelli74 @mib1185
/homeassistant/components/fritz/ @mammuth @AaronDavidSchneider @chemelli74 @mib1185
/tests/components/fritz/ @mammuth @AaronDavidSchneider @chemelli74 @mib1185
/homeassistant/components/fritzbox/ @mib1185 @flabbamann
/tests/components/fritzbox/ @mib1185 @flabbamann
/homeassistant/components/fritzbox_callmonitor/ @cdce8p
@@ -546,8 +537,6 @@ build.json @home-assistant/supervisor
/tests/components/github/ @timmo001 @ludeeus
/homeassistant/components/glances/ @engrbm87
/tests/components/glances/ @engrbm87
/homeassistant/components/go2rtc/ @home-assistant/core
/tests/components/go2rtc/ @home-assistant/core
/homeassistant/components/goalzero/ @tkdrob
/tests/components/goalzero/ @tkdrob
/homeassistant/components/gogogate2/ @vangorra
@@ -560,14 +549,11 @@ build.json @home-assistant/supervisor
/tests/components/google_assistant/ @home-assistant/cloud
/homeassistant/components/google_assistant_sdk/ @tronikos
/tests/components/google_assistant_sdk/ @tronikos
/homeassistant/components/google_cloud/ @lufton @tronikos
/tests/components/google_cloud/ @lufton @tronikos
/homeassistant/components/google_cloud/ @lufton
/homeassistant/components/google_generative_ai_conversation/ @tronikos
/tests/components/google_generative_ai_conversation/ @tronikos
/homeassistant/components/google_mail/ @tkdrob
/tests/components/google_mail/ @tkdrob
/homeassistant/components/google_photos/ @allenporter
/tests/components/google_photos/ @allenporter
/homeassistant/components/google_sheets/ @tkdrob
/tests/components/google_sheets/ @tkdrob
/homeassistant/components/google_tasks/ @allenporter
@@ -619,8 +605,8 @@ build.json @home-assistant/supervisor
/tests/components/hlk_sw16/ @jameshilliard
/homeassistant/components/holiday/ @jrieger @gjohansson-ST
/tests/components/holiday/ @jrieger @gjohansson-ST
/homeassistant/components/home_connect/ @DavidMStraub @Diegorro98
/tests/components/home_connect/ @DavidMStraub @Diegorro98
/homeassistant/components/home_connect/ @DavidMStraub
/tests/components/home_connect/ @DavidMStraub
/homeassistant/components/homeassistant/ @home-assistant/core
/tests/components/homeassistant/ @home-assistant/core
/homeassistant/components/homeassistant_alerts/ @home-assistant/core
@@ -645,8 +631,6 @@ build.json @home-assistant/supervisor
/tests/components/homewizard/ @DCSBL
/homeassistant/components/honeywell/ @rdfurman @mkmer
/tests/components/honeywell/ @rdfurman @mkmer
/homeassistant/components/html5/ @alexyao2015
/tests/components/html5/ @alexyao2015
/homeassistant/components/http/ @home-assistant/core
/tests/components/http/ @home-assistant/core
/homeassistant/components/huawei_lte/ @scop @fphammerle
@@ -661,8 +645,6 @@ build.json @home-assistant/supervisor
/tests/components/hunterdouglas_powerview/ @bdraco @kingy444 @trullock
/homeassistant/components/husqvarna_automower/ @Thomas55555
/tests/components/husqvarna_automower/ @Thomas55555
/homeassistant/components/husqvarna_automower_ble/ @alistair23
/tests/components/husqvarna_automower_ble/ @alistair23
/homeassistant/components/huum/ @frwickst
/tests/components/huum/ @frwickst
/homeassistant/components/hvv_departures/ @vigonotion
@@ -727,8 +709,8 @@ build.json @home-assistant/supervisor
/tests/components/ios/ @robbiet480
/homeassistant/components/iotawatt/ @gtdiehl @jyavenard
/tests/components/iotawatt/ @gtdiehl @jyavenard
/homeassistant/components/iotty/ @pburgio @shapournemati-iotty
/tests/components/iotty/ @pburgio @shapournemati-iotty
/homeassistant/components/iotty/ @pburgio
/tests/components/iotty/ @pburgio
/homeassistant/components/iperf3/ @rohankapoorcom
/homeassistant/components/ipma/ @dgomes
/tests/components/ipma/ @dgomes
@@ -741,8 +723,6 @@ build.json @home-assistant/supervisor
/tests/components/iron_os/ @tr4nt0r
/homeassistant/components/isal/ @bdraco
/tests/components/isal/ @bdraco
/homeassistant/components/iskra/ @iskramis
/tests/components/iskra/ @iskramis
/homeassistant/components/islamic_prayer_times/ @engrbm87 @cpfair
/tests/components/islamic_prayer_times/ @engrbm87 @cpfair
/homeassistant/components/israel_rail/ @shaiu
@@ -819,12 +799,8 @@ build.json @home-assistant/supervisor
/tests/components/leaone/ @bdraco
/homeassistant/components/led_ble/ @bdraco
/tests/components/led_ble/ @bdraco
/homeassistant/components/lektrico/ @lektrico
/tests/components/lektrico/ @lektrico
/homeassistant/components/lg_netcast/ @Drafteed @splinter98
/tests/components/lg_netcast/ @Drafteed @splinter98
/homeassistant/components/lg_thinq/ @LG-ThinQ-Integration
/tests/components/lg_thinq/ @LG-ThinQ-Integration
/homeassistant/components/lidarr/ @tkdrob
/tests/components/lidarr/ @tkdrob
/homeassistant/components/lifx/ @Djelibeybi
@@ -869,8 +845,8 @@ build.json @home-assistant/supervisor
/tests/components/lupusec/ @majuss @suaveolent
/homeassistant/components/lutron/ @cdheiser @wilburCForce
/tests/components/lutron/ @cdheiser @wilburCForce
/homeassistant/components/lutron_caseta/ @swails @danaues @eclair4151
/tests/components/lutron_caseta/ @swails @danaues @eclair4151
/homeassistant/components/lutron_caseta/ @swails @bdraco @danaues @eclair4151
/tests/components/lutron_caseta/ @swails @bdraco @danaues @eclair4151
/homeassistant/components/lyric/ @timmo001
/tests/components/lyric/ @timmo001
/homeassistant/components/madvr/ @iloveicedgreentea
@@ -933,8 +909,6 @@ build.json @home-assistant/supervisor
/tests/components/modern_forms/ @wonderslug
/homeassistant/components/moehlenhoff_alpha2/ @j-a-n
/tests/components/moehlenhoff_alpha2/ @j-a-n
/homeassistant/components/monarch_money/ @jeeftor
/tests/components/monarch_money/ @jeeftor
/homeassistant/components/monoprice/ @etsinko @OnFreund
/tests/components/monoprice/ @etsinko @OnFreund
/homeassistant/components/monzo/ @jakemartin-icl
@@ -956,8 +930,6 @@ build.json @home-assistant/supervisor
/homeassistant/components/msteams/ @peroyvind
/homeassistant/components/mullvad/ @meichthys
/tests/components/mullvad/ @meichthys
/homeassistant/components/music_assistant/ @music-assistant
/tests/components/music_assistant/ @music-assistant
/homeassistant/components/mutesync/ @currentoor
/tests/components/mutesync/ @currentoor
/homeassistant/components/my/ @home-assistant/core
@@ -972,8 +944,6 @@ build.json @home-assistant/supervisor
/tests/components/nam/ @bieniu
/homeassistant/components/nanoleaf/ @milanmeu @joostlek
/tests/components/nanoleaf/ @milanmeu @joostlek
/homeassistant/components/nasweb/ @nasWebio
/tests/components/nasweb/ @nasWebio
/homeassistant/components/neato/ @Santobert
/tests/components/neato/ @Santobert
/homeassistant/components/nederlandse_spoorwegen/ @YarmoM
@@ -1014,8 +984,6 @@ build.json @home-assistant/supervisor
/homeassistant/components/noaa_tides/ @jdelaney72
/homeassistant/components/nobo_hub/ @echoromeo @oyvindwe
/tests/components/nobo_hub/ @echoromeo @oyvindwe
/homeassistant/components/nordpool/ @gjohansson-ST
/tests/components/nordpool/ @gjohansson-ST
/homeassistant/components/notify/ @home-assistant/core
/tests/components/notify/ @home-assistant/core
/homeassistant/components/notify_events/ @matrozov @papajojo
@@ -1038,8 +1006,6 @@ build.json @home-assistant/supervisor
/tests/components/nut/ @bdraco @ollo69 @pestevez
/homeassistant/components/nws/ @MatthewFlamm @kamiyo
/tests/components/nws/ @MatthewFlamm @kamiyo
/homeassistant/components/nyt_games/ @joostlek
/tests/components/nyt_games/ @joostlek
/homeassistant/components/nzbget/ @chriscla
/tests/components/nzbget/ @chriscla
/homeassistant/components/obihai/ @dshokouhi @ejpenney
@@ -1059,7 +1025,6 @@ build.json @home-assistant/supervisor
/homeassistant/components/onewire/ @garbled1 @epenet
/tests/components/onewire/ @garbled1 @epenet
/homeassistant/components/onkyo/ @arturpragacz
/tests/components/onkyo/ @arturpragacz
/homeassistant/components/onvif/ @hunterjm
/tests/components/onvif/ @hunterjm
/homeassistant/components/open_meteo/ @frenck
@@ -1101,10 +1066,10 @@ build.json @home-assistant/supervisor
/tests/components/ovo_energy/ @timmo001
/homeassistant/components/p1_monitor/ @klaasnicolaas
/tests/components/p1_monitor/ @klaasnicolaas
/homeassistant/components/palazzetti/ @dotvav
/tests/components/palazzetti/ @dotvav
/homeassistant/components/panel_custom/ @home-assistant/frontend
/tests/components/panel_custom/ @home-assistant/frontend
/homeassistant/components/panel_iframe/ @home-assistant/frontend
/tests/components/panel_iframe/ @home-assistant/frontend
/homeassistant/components/peco/ @IceBotYT
/tests/components/peco/ @IceBotYT
/homeassistant/components/pegel_online/ @mib1185
@@ -1119,6 +1084,8 @@ build.json @home-assistant/supervisor
/tests/components/pi_hole/ @shenxn
/homeassistant/components/picnic/ @corneyl
/tests/components/picnic/ @corneyl
/homeassistant/components/pilight/ @trekky12
/tests/components/pilight/ @trekky12
/homeassistant/components/ping/ @jpbede
/tests/components/ping/ @jpbede
/homeassistant/components/plaato/ @JohNan
@@ -1148,8 +1115,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/proximity/ @mib1185
/tests/components/proximity/ @mib1185
/homeassistant/components/proxmoxve/ @jhollowe @Corbeno
/homeassistant/components/prusalink/ @balloob
/tests/components/prusalink/ @balloob
/homeassistant/components/prusalink/ @balloob @Skaronator
/tests/components/prusalink/ @balloob @Skaronator
/homeassistant/components/ps4/ @ktnrg45
/tests/components/ps4/ @ktnrg45
/homeassistant/components/pure_energie/ @klaasnicolaas
@@ -1252,8 +1219,8 @@ build.json @home-assistant/supervisor
/tests/components/roku/ @ctalkington
/homeassistant/components/romy/ @xeniter
/tests/components/romy/ @xeniter
/homeassistant/components/roomba/ @pschmitt @cyr-ius @shenxn @Orhideous
/tests/components/roomba/ @pschmitt @cyr-ius @shenxn @Orhideous
/homeassistant/components/roomba/ @pschmitt @cyr-ius @shenxn @Xitee1 @Orhideous
/tests/components/roomba/ @pschmitt @cyr-ius @shenxn @Xitee1 @Orhideous
/homeassistant/components/roon/ @pavoni
/tests/components/roon/ @pavoni
/homeassistant/components/rpi_power/ @shenxn @swetoast
@@ -1310,8 +1277,6 @@ build.json @home-assistant/supervisor
/tests/components/sensorpro/ @bdraco
/homeassistant/components/sensorpush/ @bdraco
/tests/components/sensorpush/ @bdraco
/homeassistant/components/sensoterra/ @markruys
/tests/components/sensoterra/ @markruys
/homeassistant/components/sentry/ @dcramer @frenck
/tests/components/sentry/ @dcramer @frenck
/homeassistant/components/senz/ @milanmeu
@@ -1346,8 +1311,6 @@ build.json @home-assistant/supervisor
/tests/components/siren/ @home-assistant/core @raman325
/homeassistant/components/sisyphus/ @jkeljo
/homeassistant/components/sky_hub/ @rogerselwyn
/homeassistant/components/sky_remote/ @dunnmj @saty9
/tests/components/sky_remote/ @dunnmj @saty9
/homeassistant/components/skybell/ @tkdrob
/tests/components/skybell/ @tkdrob
/homeassistant/components/slack/ @tkdrob @fletcherau
@@ -1366,7 +1329,6 @@ build.json @home-assistant/supervisor
/homeassistant/components/smarttub/ @mdz
/tests/components/smarttub/ @mdz
/homeassistant/components/smarty/ @z0mbieprocess
/tests/components/smarty/ @z0mbieprocess
/homeassistant/components/smhi/ @gjohansson-ST
/tests/components/smhi/ @gjohansson-ST
/homeassistant/components/smlight/ @tl-sl
@@ -1400,13 +1362,15 @@ build.json @home-assistant/supervisor
/tests/components/spaceapi/ @fabaff
/homeassistant/components/speedtestdotnet/ @rohankapoorcom @engrbm87
/tests/components/speedtestdotnet/ @rohankapoorcom @engrbm87
/homeassistant/components/spider/ @peternijssen
/tests/components/spider/ @peternijssen
/homeassistant/components/splunk/ @Bre77
/homeassistant/components/spotify/ @frenck @joostlek
/tests/components/spotify/ @frenck @joostlek
/homeassistant/components/sql/ @gjohansson-ST @dougiteixeira
/tests/components/sql/ @gjohansson-ST @dougiteixeira
/homeassistant/components/squeezebox/ @rajlaud @pssc @peteS-UK
/tests/components/squeezebox/ @rajlaud @pssc @peteS-UK
/homeassistant/components/squeezebox/ @rajlaud
/tests/components/squeezebox/ @rajlaud
/homeassistant/components/srp_energy/ @briglx
/tests/components/srp_energy/ @briglx
/homeassistant/components/starline/ @anonym-tsk
@@ -1430,8 +1394,8 @@ build.json @home-assistant/supervisor
/tests/components/stt/ @home-assistant/core
/homeassistant/components/subaru/ @G-Two
/tests/components/subaru/ @G-Two
/homeassistant/components/suez_water/ @ooii @jb101010-2
/tests/components/suez_water/ @ooii @jb101010-2
/homeassistant/components/suez_water/ @ooii
/tests/components/suez_water/ @ooii
/homeassistant/components/sun/ @Swamp-Ig
/tests/components/sun/ @Swamp-Ig
/homeassistant/components/sunweg/ @rokam
@@ -1450,10 +1414,10 @@ build.json @home-assistant/supervisor
/tests/components/switchbee/ @jafar-atili
/homeassistant/components/switchbot/ @danielhiversen @RenierM26 @murtas @Eloston @dsypniewski
/tests/components/switchbot/ @danielhiversen @RenierM26 @murtas @Eloston @dsypniewski
/homeassistant/components/switchbot_cloud/ @SeraphicRav @laurence-presland @Gigatrappeur
/tests/components/switchbot_cloud/ @SeraphicRav @laurence-presland @Gigatrappeur
/homeassistant/components/switcher_kis/ @thecode @YogevBokobza
/tests/components/switcher_kis/ @thecode @YogevBokobza
/homeassistant/components/switchbot_cloud/ @SeraphicRav @laurence-presland
/tests/components/switchbot_cloud/ @SeraphicRav @laurence-presland
/homeassistant/components/switcher_kis/ @thecode
/tests/components/switcher_kis/ @thecode
/homeassistant/components/switchmate/ @danielhiversen @qiz-li
/homeassistant/components/syncthing/ @zhulik
/tests/components/syncthing/ @zhulik
@@ -1489,8 +1453,8 @@ build.json @home-assistant/supervisor
/tests/components/tedee/ @patrickhilker @zweckj
/homeassistant/components/tellduslive/ @fredrike
/tests/components/tellduslive/ @fredrike
/homeassistant/components/template/ @PhracturedBlue @home-assistant/core
/tests/components/template/ @PhracturedBlue @home-assistant/core
/homeassistant/components/template/ @PhracturedBlue @tetienne @home-assistant/core
/tests/components/template/ @PhracturedBlue @tetienne @home-assistant/core
/homeassistant/components/tesla_fleet/ @Bre77
/tests/components/tesla_fleet/ @Bre77
/homeassistant/components/tesla_wall_connector/ @einarhauks
@@ -1531,8 +1495,6 @@ build.json @home-assistant/supervisor
/tests/components/tomorrowio/ @raman325 @lymanepp
/homeassistant/components/totalconnect/ @austinmroczek
/tests/components/totalconnect/ @austinmroczek
/homeassistant/components/touchline_sl/ @jnsgruk
/tests/components/touchline_sl/ @jnsgruk
/homeassistant/components/tplink/ @rytilahti @bdraco @sdb9696
/tests/components/tplink/ @rytilahti @bdraco @sdb9696
/homeassistant/components/tplink_omada/ @MarkGodwin
@@ -1557,8 +1519,6 @@ build.json @home-assistant/supervisor
/tests/components/transmission/ @engrbm87 @JPHutchins
/homeassistant/components/trend/ @jpbede
/tests/components/trend/ @jpbede
/homeassistant/components/triggercmd/ @rvmey
/tests/components/triggercmd/ @rvmey
/homeassistant/components/tts/ @home-assistant/core
/tests/components/tts/ @home-assistant/core
/homeassistant/components/tuya/ @Tuya @zlinoliver @frenck
@@ -1663,8 +1623,6 @@ build.json @home-assistant/supervisor
/tests/components/webostv/ @thecode
/homeassistant/components/websocket_api/ @home-assistant/core
/tests/components/websocket_api/ @home-assistant/core
/homeassistant/components/weheat/ @jesperraemaekers
/tests/components/weheat/ @jesperraemaekers
/homeassistant/components/wemo/ @esev
/tests/components/wemo/ @esev
/homeassistant/components/whirlpool/ @abmantis @mkmer
@@ -1682,8 +1640,6 @@ build.json @home-assistant/supervisor
/tests/components/wiz/ @sbidy
/homeassistant/components/wled/ @frenck
/tests/components/wled/ @frenck
/homeassistant/components/wmspro/ @mback2k
/tests/components/wmspro/ @mback2k
/homeassistant/components/wolflink/ @adamkrol93 @mtielen
/tests/components/wolflink/ @adamkrol93 @mtielen
/homeassistant/components/workday/ @fabaff @gjohansson-ST
@@ -1704,8 +1660,6 @@ build.json @home-assistant/supervisor
/tests/components/xiaomi_miio/ @rytilahti @syssi @starkillerOG
/homeassistant/components/xiaomi_tv/ @simse
/homeassistant/components/xmpp/ @fabaff @flowolf
/homeassistant/components/yale/ @bdraco
/tests/components/yale/ @bdraco
/homeassistant/components/yale_smart_alarm/ @gjohansson-ST
/tests/components/yale_smart_alarm/ @gjohansson-ST
/homeassistant/components/yalexs_ble/ @bdraco
32 Dockerfile
@@ -7,13 +7,12 @@ FROM ${BUILD_FROM}
# Synchronize with homeassistant/core.py:async_stop
ENV \
    S6_SERVICES_GRACETIME=240000 \
    UV_SYSTEM_PYTHON=true \
    UV_NO_CACHE=true
    UV_SYSTEM_PYTHON=true

ARG QEMU_CPU

# Install uv
RUN pip3 install uv==0.5.0
RUN pip3 install uv==0.2.27

WORKDIR /usr/src

@@ -30,9 +29,15 @@ RUN \
    if ls homeassistant/home_assistant_*.whl 1> /dev/null 2>&1; then \
        uv pip install homeassistant/home_assistant_*.whl; \
    fi \
    && uv pip install \
        --no-build \
        -r homeassistant/requirements_all.txt
    && if [ "${BUILD_ARCH}" = "i386" ]; then \
        linux32 uv pip install \
            --no-build \
            -r homeassistant/requirements_all.txt; \
    else \
        uv pip install \
            --no-build \
            -r homeassistant/requirements_all.txt; \
    fi

## Setup Home Assistant Core
COPY . homeassistant/

@@ -45,19 +50,4 @@ RUN \
# Home Assistant S6-Overlay
COPY rootfs /

# Needs to be redefined inside the FROM statement to be set for RUN commands
ARG BUILD_ARCH
# Get go2rtc binary
RUN \
    case "${BUILD_ARCH}" in \
        "aarch64") go2rtc_suffix='arm64' ;; \
        "armhf") go2rtc_suffix='armv6' ;; \
        "armv7") go2rtc_suffix='arm' ;; \
        *) go2rtc_suffix=${BUILD_ARCH} ;; \
    esac \
    && curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.7/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
    && chmod +x /bin/go2rtc \
    # Verify go2rtc can be executed
    && go2rtc --version

WORKDIR /config
@@ -35,9 +35,6 @@ RUN \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/*

# Add go2rtc binary
COPY --from=ghcr.io/alexxit/go2rtc:latest /usr/local/bin/go2rtc /bin/go2rtc

# Install uv
RUN pip3 install uv

@@ -45,8 +42,7 @@ WORKDIR /usr/src

# Setup hass-release
RUN git clone --depth 1 https://github.com/home-assistant/hass-release \
    && uv pip install --system -e hass-release/ \
    && chown -R vscode /usr/src/hass-release/data
    && uv pip install --system -e hass-release/

USER vscode
ENV VIRTUAL_ENV="/home/vscode/.local/ha-venv"
@@ -7,6 +7,8 @@ Check out `home-assistant.io <https://home-assistant.io>`__ for `a
demo <https://demo.home-assistant.io>`__, `installation instructions <https://home-assistant.io/getting-started/>`__,
`tutorials <https://home-assistant.io/getting-started/automation/>`__ and `documentation <https://home-assistant.io/docs/>`__.

This is a project of the `Open Home Foundation <https://www.openhomefoundation.org/>`__.

|screenshot-states|

Featured integrations
@@ -20,14 +22,9 @@ components <https://developers.home-assistant.io/docs/creating_component_index/>
If you run into issues while using Home Assistant or during development
of a component, check the `Home Assistant help section <https://home-assistant.io/help/>`__ of our website for further help and information.

|ohf-logo|

.. |Chat Status| image:: https://img.shields.io/discord/330944238910963714.svg
   :target: https://www.home-assistant.io/join-chat/
.. |screenshot-states| image:: https://raw.githubusercontent.com/home-assistant/core/dev/.github/assets/screenshot-states.png
   :target: https://demo.home-assistant.io
.. |screenshot-integrations| image:: https://raw.githubusercontent.com/home-assistant/core/dev/.github/assets/screenshot-integrations.png
   :target: https://home-assistant.io/integrations/
.. |ohf-logo| image:: https://www.openhomefoundation.org/badges/home-assistant.png
   :alt: Home Assistant - A project from the Open Home Foundation
   :target: https://www.openhomefoundation.org/
10 build.yaml
@@ -1,10 +1,10 @@
image: ghcr.io/home-assistant/{arch}-homeassistant
build_from:
  aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2024.11.0
  armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2024.11.0
  armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2024.11.0
  amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2024.11.0
  i386: ghcr.io/home-assistant/i386-homeassistant-base:2024.11.0
  aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2024.06.1
  armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2024.06.1
  armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2024.06.1
  amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2024.06.1
  i386: ghcr.io/home-assistant/i386-homeassistant-base:2024.06.1
codenotary:
  signer: notary@home-assistant.io
  base_image: notary@home-assistant.io
@@ -9,7 +9,6 @@ import os
import sys
import threading

from .backup_restore import restore_backup
from .const import REQUIRED_PYTHON_VER, RESTART_EXIT_CODE, __version__

FAULT_LOG_FILENAME = "home-assistant.log.fault"
@@ -183,9 +182,6 @@ def main() -> int:
        return scripts.run(args.script)

    config_dir = os.path.abspath(os.path.join(os.getcwd(), args.config))
    if restore_backup(config_dir):
        return RESTART_EXIT_CODE

    ensure_config_path(config_dir)

    # pylint: disable-next=import-outside-toplevel
@@ -12,6 +12,7 @@ from typing import Any, cast

import jwt

from homeassistant import data_entry_flow
from homeassistant.core import (
    CALLBACK_TYPE,
    HassJob,
@@ -19,14 +20,13 @@ from homeassistant.core import (
    HomeAssistant,
    callback,
)
from homeassistant.data_entry_flow import FlowHandler, FlowManager, FlowResultType
from homeassistant.helpers.event import async_track_point_in_utc_time
from homeassistant.util import dt as dt_util

from . import auth_store, jwt_wrapper, models
from .const import ACCESS_TOKEN_EXPIRATION, GROUP_ID_ADMIN, REFRESH_TOKEN_EXPIRATION
from .mfa_modules import MultiFactorAuthModule, auth_mfa_module_from_config
from .models import AuthFlowContext, AuthFlowResult
from .models import AuthFlowResult
from .providers import AuthProvider, LoginFlow, auth_provider_from_config
from .providers.homeassistant import HassAuthProvider

@@ -98,7 +98,7 @@ async def auth_manager_from_config(


class AuthManagerFlowManager(
    FlowManager[AuthFlowContext, AuthFlowResult, tuple[str, str]]
    data_entry_flow.FlowManager[AuthFlowResult, tuple[str, str]]
):
    """Manage authentication flows."""

@@ -113,7 +113,7 @@ class AuthManagerFlowManager(
        self,
        handler_key: tuple[str, str],
        *,
        context: AuthFlowContext | None = None,
        context: dict[str, Any] | None = None,
        data: dict[str, Any] | None = None,
    ) -> LoginFlow:
        """Create a login flow."""
@@ -124,17 +124,13 @@ class AuthManagerFlowManager(

    async def async_finish_flow(
        self,
        flow: FlowHandler[AuthFlowContext, AuthFlowResult, tuple[str, str]],
        flow: data_entry_flow.FlowHandler[AuthFlowResult, tuple[str, str]],
        result: AuthFlowResult,
    ) -> AuthFlowResult:
        """Return a user as result of login flow.

        This method is called when a flow step returns FlowResultType.ABORT or
        FlowResultType.CREATE_ENTRY.
        """
        """Return a user as result of login flow."""
        flow = cast(LoginFlow, flow)

        if result["type"] != FlowResultType.CREATE_ENTRY:
        if result["type"] != data_entry_flow.FlowResultType.CREATE_ENTRY:
            return result

        # we got final result
@@ -3,7 +3,7 @@
from __future__ import annotations

from datetime import datetime, timedelta
from ipaddress import IPv4Address, IPv6Address
from functools import cached_property
import secrets
from typing import Any, NamedTuple
import uuid
@@ -11,10 +11,9 @@ import uuid
import attr
from attr import Attribute
from attr.setters import validate
from propcache import cached_property

from homeassistant.const import __version__
from homeassistant.data_entry_flow import FlowContext, FlowResult
from homeassistant.data_entry_flow import FlowResult
from homeassistant.util import dt as dt_util

from . import permissions as perm_mdl
@@ -24,16 +23,7 @@ TOKEN_TYPE_NORMAL = "normal"
TOKEN_TYPE_SYSTEM = "system"
TOKEN_TYPE_LONG_LIVED_ACCESS_TOKEN = "long_lived_access_token"


class AuthFlowContext(FlowContext, total=False):
    """Typed context dict for auth flow."""

    credential_only: bool
    ip_address: IPv4Address | IPv6Address
    redirect_uri: str


AuthFlowResult = FlowResult[AuthFlowContext, tuple[str, str]]
AuthFlowResult = FlowResult[tuple[str, str]]


@attr.s(slots=True)
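For comparison, a minimal sketch of how the dev-branch AuthFlowContext removed in the hunk above is constructed, assuming the import path shown there; all values are illustrative, not part of the diff:

    # Sketch only; AuthFlowContext is the TypedDict shown on the dev side above.
    from ipaddress import ip_address

    from homeassistant.auth.models import AuthFlowContext

    context = AuthFlowContext(
        ip_address=ip_address("192.0.2.10"),  # illustrative client address
        redirect_uri="https://example.com/callback",  # illustrative value
    )
    # With total=False every key is optional, and a type checker validates each one.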
@@ -10,10 +10,9 @@ from typing import Any
import voluptuous as vol
from voluptuous.humanize import humanize_error

from homeassistant import requirements
from homeassistant import data_entry_flow, requirements
from homeassistant.const import CONF_ID, CONF_NAME, CONF_TYPE
from homeassistant.core import HomeAssistant, callback
from homeassistant.data_entry_flow import FlowHandler
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.importlib import async_import_module
from homeassistant.util import dt as dt_util
@@ -22,14 +21,7 @@ from homeassistant.util.hass_dict import HassKey

from ..auth_store import AuthStore
from ..const import MFA_SESSION_EXPIRATION
from ..models import (
    AuthFlowContext,
    AuthFlowResult,
    Credentials,
    RefreshToken,
    User,
    UserMeta,
)
from ..models import AuthFlowResult, Credentials, RefreshToken, User, UserMeta

_LOGGER = logging.getLogger(__name__)
DATA_REQS: HassKey[set[str]] = HassKey("auth_prov_reqs_processed")
@@ -105,7 +97,7 @@ class AuthProvider:

    # Implement by extending class

    async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow:
    async def async_login_flow(self, context: dict[str, Any] | None) -> LoginFlow:
        """Return the data flow for logging in with auth provider.

        Auth provider should extend LoginFlow and return an instance.
@@ -192,7 +184,7 @@ async def load_auth_provider_module(
    return module


class LoginFlow(FlowHandler[AuthFlowContext, AuthFlowResult, tuple[str, str]]):
class LoginFlow(data_entry_flow.FlowHandler[AuthFlowResult, tuple[str, str]]):
    """Handler for the login flow."""

    _flow_result = AuthFlowResult
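A minimal sketch of a provider written against the dict-based async_login_flow signature this branch keeps; the provider type and class name are illustrative and not part of Home Assistant:

    from typing import Any

    from homeassistant.auth.providers import AUTH_PROVIDERS, AuthProvider, LoginFlow


    @AUTH_PROVIDERS.register("sketch")  # hypothetical provider type
    class SketchAuthProvider(AuthProvider):
        """Illustrative provider; real providers also validate credentials."""

        async def async_login_flow(self, context: dict[str, Any] | None) -> LoginFlow:
            """Return a flow to login."""
            return LoginFlow(self)  # real providers return a LoginFlow subclass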
@@ -13,7 +13,7 @@ import voluptuous as vol
from homeassistant.const import CONF_COMMAND
from homeassistant.exceptions import HomeAssistantError

from ..models import AuthFlowContext, AuthFlowResult, Credentials, UserMeta
from ..models import AuthFlowResult, Credentials, UserMeta
from . import AUTH_PROVIDER_SCHEMA, AUTH_PROVIDERS, AuthProvider, LoginFlow

CONF_ARGS = "args"
@@ -59,7 +59,7 @@ class CommandLineAuthProvider(AuthProvider):
        super().__init__(*args, **kwargs)
        self._user_meta: dict[str, dict[str, Any]] = {}

    async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow:
    async def async_login_flow(self, context: dict[str, Any] | None) -> LoginFlow:
        """Return a flow to login."""
        return CommandLineLoginFlow(self)
@@ -17,7 +17,7 @@ from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import issue_registry as ir
from homeassistant.helpers.storage import Store

from ..models import AuthFlowContext, AuthFlowResult, Credentials, UserMeta
from ..models import AuthFlowResult, Credentials, UserMeta
from . import AUTH_PROVIDER_SCHEMA, AUTH_PROVIDERS, AuthProvider, LoginFlow

STORAGE_VERSION = 1
@@ -305,7 +305,7 @@ class HassAuthProvider(AuthProvider):
        await data.async_load()
        self.data = data

    async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow:
    async def async_login_flow(self, context: dict[str, Any] | None) -> LoginFlow:
        """Return a flow to login."""
        return HassLoginFlow(self)
@@ -4,14 +4,14 @@ from __future__ import annotations

from collections.abc import Mapping
import hmac
from typing import cast
from typing import Any, cast

import voluptuous as vol

from homeassistant.core import callback
from homeassistant.exceptions import HomeAssistantError

from ..models import AuthFlowContext, AuthFlowResult, Credentials, UserMeta
from ..models import AuthFlowResult, Credentials, UserMeta
from . import AUTH_PROVIDER_SCHEMA, AUTH_PROVIDERS, AuthProvider, LoginFlow

USER_SCHEMA = vol.Schema(
@@ -36,7 +36,7 @@ class InvalidAuthError(HomeAssistantError):
class ExampleAuthProvider(AuthProvider):
    """Example auth provider based on hardcoded usernames and passwords."""

    async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow:
    async def async_login_flow(self, context: dict[str, Any] | None) -> LoginFlow:
        """Return a flow to login."""
        return ExampleLoginFlow(self)
@@ -25,13 +25,7 @@ import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.network import is_cloud_connection

from .. import InvalidAuthError
from ..models import (
    AuthFlowContext,
    AuthFlowResult,
    Credentials,
    RefreshToken,
    UserMeta,
)
from ..models import AuthFlowResult, Credentials, RefreshToken, UserMeta
from . import AUTH_PROVIDER_SCHEMA, AUTH_PROVIDERS, AuthProvider, LoginFlow

type IPAddress = IPv4Address | IPv6Address
@@ -104,7 +98,7 @@ class TrustedNetworksAuthProvider(AuthProvider):
        """Trusted Networks auth provider does not support MFA."""
        return False

    async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow:
    async def async_login_flow(self, context: dict[str, Any] | None) -> LoginFlow:
        """Return a flow to login."""
        assert context is not None
        ip_addr = cast(IPAddress, context.get("ip_address"))
@@ -9,7 +9,6 @@ import it.

from __future__ import annotations

# pylint: disable-next=hass-deprecated-import
from functools import cached_property as _cached_property, partial

from homeassistant.helpers.deprecation import (
@@ -1,126 +0,0 @@
"""Home Assistant module to handle restoring backups."""

from dataclasses import dataclass
import json
import logging
from pathlib import Path
import shutil
import sys
from tempfile import TemporaryDirectory

from awesomeversion import AwesomeVersion
import securetar

from .const import __version__ as HA_VERSION

RESTORE_BACKUP_FILE = ".HA_RESTORE"
KEEP_PATHS = ("backups",)

_LOGGER = logging.getLogger(__name__)


@dataclass
class RestoreBackupFileContent:
    """Definition for restore backup file content."""

    backup_file_path: Path


def restore_backup_file_content(config_dir: Path) -> RestoreBackupFileContent | None:
    """Return the contents of the restore backup file."""
    instruction_path = config_dir.joinpath(RESTORE_BACKUP_FILE)
    try:
        instruction_content = json.loads(instruction_path.read_text(encoding="utf-8"))
        return RestoreBackupFileContent(
            backup_file_path=Path(instruction_content["path"])
        )
    except (FileNotFoundError, json.JSONDecodeError):
        return None


def _clear_configuration_directory(config_dir: Path) -> None:
    """Delete all files and directories in the config directory except for the backups directory."""
    keep_paths = [config_dir.joinpath(path) for path in KEEP_PATHS]
    config_contents = sorted(
        [entry for entry in config_dir.iterdir() if entry not in keep_paths]
    )

    for entry in config_contents:
        entrypath = config_dir.joinpath(entry)

        if entrypath.is_file():
            entrypath.unlink()
        elif entrypath.is_dir():
            shutil.rmtree(entrypath)


def _extract_backup(config_dir: Path, backup_file_path: Path) -> None:
    """Extract the backup file to the config directory."""
    with (
        TemporaryDirectory() as tempdir,
        securetar.SecureTarFile(
            backup_file_path,
            gzip=False,
            mode="r",
        ) as ostf,
    ):
        ostf.extractall(
            path=Path(tempdir, "extracted"),
            members=securetar.secure_path(ostf),
            filter="fully_trusted",
        )
        backup_meta_file = Path(tempdir, "extracted", "backup.json")
        backup_meta = json.loads(backup_meta_file.read_text(encoding="utf8"))

        if (
            backup_meta_version := AwesomeVersion(
                backup_meta["homeassistant"]["version"]
            )
        ) > HA_VERSION:
            raise ValueError(
                f"You need at least Home Assistant version {backup_meta_version} to restore this backup"
            )

        with securetar.SecureTarFile(
            Path(
                tempdir,
                "extracted",
                f"homeassistant.tar{'.gz' if backup_meta["compressed"] else ''}",
            ),
            gzip=backup_meta["compressed"],
            mode="r",
        ) as istf:
            for member in istf.getmembers():
                if member.name == "data":
                    continue
                member.name = member.name.replace("data/", "")
            _clear_configuration_directory(config_dir)
            istf.extractall(
                path=config_dir,
                members=[
                    member
                    for member in securetar.secure_path(istf)
                    if member.name != "data"
                ],
                filter="fully_trusted",
            )


def restore_backup(config_dir_path: str) -> bool:
    """Restore the backup file if any.

    Returns True if a restore backup file was found and restored, False otherwise.
    """
    config_dir = Path(config_dir_path)
    if not (restore_content := restore_backup_file_content(config_dir)):
        return False

    logging.basicConfig(stream=sys.stdout, level=logging.INFO)
    backup_file_path = restore_content.backup_file_path
    _LOGGER.info("Restoring %s", backup_file_path)
    try:
        _extract_backup(config_dir, backup_file_path)
    except FileNotFoundError as err:
        raise ValueError(f"Backup file {backup_file_path} does not exist") from err
    _LOGGER.info("Restore complete, restarting")
    return True
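The removed module above is driven by a small instruction file; a sketch of producing one, using only the RESTORE_BACKUP_FILE name and JSON shape that restore_backup_file_content() reads (the paths are illustrative):

    import json
    from pathlib import Path

    config_dir = Path("/config")  # illustrative config directory
    backup = config_dir / "backups" / "example_backup.tar"  # illustrative backup
    # restore_backup() looks for .HA_RESTORE containing {"path": <backup file>}.
    (config_dir / ".HA_RESTORE").write_text(
        json.dumps({"path": str(backup)}), encoding="utf-8"
    )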
@@ -70,7 +70,6 @@ from .const import (
    REQUIRED_NEXT_PYTHON_VER,
    SIGNAL_BOOTSTRAP_INTEGRATIONS,
)
from .core_config import async_process_ha_core_config
from .exceptions import HomeAssistantError
from .helpers import (
    area_registry,
@@ -480,7 +479,7 @@ async def async_from_config_dict(
    core_config = config.get(core.DOMAIN, {})

    try:
        await async_process_ha_core_config(hass, core_config)
        await conf_util.async_process_ha_core_config(hass, core_config)
    except vol.Invalid as config_err:
        conf_util.async_log_schema_error(config_err, core.DOMAIN, core_config, hass)
        async_notify_setup_error(hass, core.DOMAIN)
@@ -515,7 +514,7 @@ async def async_from_config_dict(
    issue_registry.async_create_issue(
        hass,
        core.DOMAIN,
        f"python_version_{required_python_version}",
        "python_version",
        is_fixable=False,
        severity=issue_registry.IssueSeverity.WARNING,
        breaks_in_ha_version=REQUIRED_NEXT_PYTHON_HA_RELEASE,
@@ -1,5 +0,0 @@
{
  "domain": "aqara",
  "name": "Aqara",
  "iot_standards": ["matter", "zigbee"]
}
@@ -5,10 +5,10 @@
    "google_assistant",
    "google_assistant_sdk",
    "google_cloud",
    "google_domains",
    "google_generative_ai_conversation",
    "google_mail",
    "google_maps",
    "google_photos",
    "google_pubsub",
    "google_sheets",
    "google_tasks",
@@ -1,5 +0,0 @@
{
  "domain": "husqvarna",
  "name": "Husqvarna",
  "integrations": ["husqvarna_automower", "husqvarna_automower_ble"]
}
@@ -1,5 +1,5 @@
{
  "domain": "lg",
  "name": "LG",
  "integrations": ["lg_netcast", "lg_soundbar", "lg_thinq", "webostv"]
  "integrations": ["lg_netcast", "lg_soundbar", "webostv"]
}
@@ -1,5 +0,0 @@
{
  "domain": "roth",
  "name": "Roth",
  "integrations": ["touchline", "touchline_sl"]
}
@@ -1,5 +0,0 @@
{
  "domain": "sky",
  "name": "Sky",
  "integrations": ["sky_hub", "sky_remote"]
}
@@ -1,11 +1,5 @@
{
  "domain": "yale",
  "name": "Yale",
  "integrations": [
    "august",
    "yale_smart_alarm",
    "yalexs_ble",
    "yale_home",
    "yale"
  ]
  "integrations": ["august", "yale_smart_alarm", "yalexs_ble", "yale_home"]
}
@@ -6,3 +6,52 @@ Component design guidelines:
  format "<DOMAIN>.<OBJECT_ID>".
- Each component should publish services only under its own domain.
"""

from __future__ import annotations

import logging

from homeassistant.core import HomeAssistant, split_entity_id
from homeassistant.helpers.frame import report
from homeassistant.helpers.group import expand_entity_ids

_LOGGER = logging.getLogger(__name__)


def is_on(hass: HomeAssistant, entity_id: str | None = None) -> bool:
    """Load up the module to call the is_on method.

    If there is no entity id given we will check all.
    """
    report(
        (
            "uses homeassistant.components.is_on."
            " This is deprecated and will stop working in Home Assistant 2024.9, it"
            " should be updated to use the function of the platform directly."
        ),
        error_if_core=True,
    )

    if entity_id:
        entity_ids = expand_entity_ids(hass, [entity_id])
    else:
        entity_ids = hass.states.entity_ids()

    for ent_id in entity_ids:
        domain = split_entity_id(ent_id)[0]

        try:
            component = getattr(hass.components, domain)

        except ImportError:
            _LOGGER.error("Failed to call %s.is_on: component not found", domain)
            continue

        if not hasattr(component, "is_on"):
            _LOGGER.warning("Integration %s has no is_on method", domain)
            continue

        if component.is_on(ent_id):
            return True

    return False
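A usage sketch for the deprecated is_on helper added above: it expands the given entity id, or checks every known state when none is given, and logs a deprecation report on each call; the entity id is illustrative:

    from homeassistant.components import is_on
    from homeassistant.core import HomeAssistant


    def kitchen_light_is_on(hass: HomeAssistant) -> bool:
        """Return True if the illustrative entity reports on."""
        return is_on(hass, "light.kitchen")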
@@ -4,10 +4,8 @@ from __future__ import annotations

from dataclasses import dataclass, field
from functools import partial
from pathlib import Path

from jaraco.abode.client import Client as Abode
import jaraco.abode.config
from jaraco.abode.exceptions import (
    AuthenticationException as AbodeAuthenticationException,
    Exception as AbodeException,
@@ -95,9 +93,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    password = entry.data[CONF_PASSWORD]
    polling = entry.data[CONF_POLLING]

    # Configure abode library to use config directory for storing data
    jaraco.abode.config.paths.override(user_data=Path(hass.config.path("Abode")))

    # For previous config entries where unique_id is None
    if entry.unique_id is None:
        hass.config_entries.async_update_entry(
@@ -7,9 +7,13 @@ from jaraco.abode.devices.alarm import Alarm
from homeassistant.components.alarm_control_panel import (
    AlarmControlPanelEntity,
    AlarmControlPanelEntityFeature,
    AlarmControlPanelState,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
    STATE_ALARM_ARMED_AWAY,
    STATE_ALARM_ARMED_HOME,
    STATE_ALARM_DISARMED,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback

@@ -40,14 +44,14 @@ class AbodeAlarm(AbodeDevice, AlarmControlPanelEntity):
    _device: Alarm

    @property
    def alarm_state(self) -> AlarmControlPanelState | None:
    def state(self) -> str | None:
        """Return the state of the device."""
        if self._device.is_standby:
            return AlarmControlPanelState.DISARMED
            return STATE_ALARM_DISARMED
        if self._device.is_away:
            return AlarmControlPanelState.ARMED_AWAY
            return STATE_ALARM_ARMED_AWAY
        if self._device.is_home:
            return AlarmControlPanelState.ARMED_HOME
            return STATE_ALARM_ARMED_HOME
        return None

    def alarm_disarm(self, code: str | None = None) -> None:
@@ -4,7 +4,7 @@ from __future__ import annotations

from typing import cast

from jaraco.abode.devices.binary_sensor import BinarySensor
from jaraco.abode.devices.sensor import BinarySensor

from homeassistant.components.binary_sensor import (
    BinarySensorDeviceClass,
@@ -102,7 +102,15 @@ class AbodeFlowHandler(ConfigFlow, domain=DOMAIN):
        existing_entry = await self.async_set_unique_id(self._username)

        if existing_entry:
            return self.async_update_reload_and_abort(existing_entry, data=config_data)
            self.hass.config_entries.async_update_entry(
                existing_entry, data=config_data
            )
            # Reload the Abode config entry otherwise devices will remain unavailable
            self.hass.async_create_task(
                self.hass.config_entries.async_reload(existing_entry.entry_id)
            )

            return self.async_abort(reason="reauth_successful")

        return self.async_create_entry(
            title=cast(str, self._username), data=config_data
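The hunk above trades the dev-branch async_update_reload_and_abort helper for a manual update, reload, and abort. A minimal sketch of the one-call form in a hypothetical reauth step; the domain and unique id are illustrative:

    from typing import Any

    from homeassistant.config_entries import ConfigFlow, ConfigFlowResult


    class SketchFlowHandler(ConfigFlow, domain="sketch"):  # hypothetical domain
        """Illustrative flow showing the one-call reauth finish."""

        async def async_step_reauth_confirm(
            self, user_input: dict[str, Any] | None = None
        ) -> ConfigFlowResult:
            existing_entry = await self.async_set_unique_id("example-account")
            if existing_entry and user_input is not None:
                # Updates the entry, schedules a reload and aborts the flow.
                return self.async_update_reload_and_abort(
                    existing_entry, data=user_input
                )
            return self.async_show_form(step_id="reauth_confirm")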
@@ -7,14 +7,8 @@
    }
  },
  "services": {
    "capture_image": {
      "service": "mdi:camera"
    },
    "change_setting": {
      "service": "mdi:cog"
    },
    "trigger_automation": {
      "service": "mdi:play"
    }
    "capture_image": "mdi:camera",
    "change_setting": "mdi:cog",
    "trigger_automation": "mdi:play"
  }
}
@@ -9,5 +9,5 @@
  },
  "iot_class": "cloud_push",
  "loggers": ["jaraco.abode", "lomond"],
  "requirements": ["jaraco.abode==6.2.1"]
  "requirements": ["jaraco.abode==5.2.1"]
}
@@ -1,29 +0,0 @@
"""Initialize the Acaia component."""

from homeassistant.const import Platform
from homeassistant.core import HomeAssistant

from .coordinator import AcaiaConfigEntry, AcaiaCoordinator

PLATFORMS = [
    Platform.BUTTON,
]


async def async_setup_entry(hass: HomeAssistant, entry: AcaiaConfigEntry) -> bool:
    """Set up acaia as config entry."""

    coordinator = AcaiaCoordinator(hass, entry)
    await coordinator.async_config_entry_first_refresh()

    entry.runtime_data = coordinator

    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

    return True


async def async_unload_entry(hass: HomeAssistant, entry: AcaiaConfigEntry) -> bool:
    """Unload a config entry."""

    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
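The removed setup above stores the coordinator on entry.runtime_data through the typed AcaiaConfigEntry alias; a sketch of how a platform module in the same package reads it back, mirroring the button platform later in this diff:

    from homeassistant.core import HomeAssistant
    from homeassistant.helpers.entity_platform import AddEntitiesCallback

    from .coordinator import AcaiaConfigEntry  # relative import within the package


    async def async_setup_entry(
        hass: HomeAssistant,
        entry: AcaiaConfigEntry,
        async_add_entities: AddEntitiesCallback,
    ) -> None:
        """Illustrative platform setup reading the typed runtime_data."""
        coordinator = entry.runtime_data  # typed as AcaiaCoordinator
        scale = coordinator.scale  # the shared AcaiaScale client
        # entities built from the coordinator and scale would be added here
        _ = (hass, scale, async_add_entities)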
@@ -1,61 +0,0 @@
"""Button entities for Acaia scales."""

from collections.abc import Callable, Coroutine
from dataclasses import dataclass
from typing import Any

from aioacaia.acaiascale import AcaiaScale

from homeassistant.components.button import ButtonEntity, ButtonEntityDescription
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from .coordinator import AcaiaConfigEntry
from .entity import AcaiaEntity


@dataclass(kw_only=True, frozen=True)
class AcaiaButtonEntityDescription(ButtonEntityDescription):
    """Description for acaia button entities."""

    press_fn: Callable[[AcaiaScale], Coroutine[Any, Any, None]]


BUTTONS: tuple[AcaiaButtonEntityDescription, ...] = (
    AcaiaButtonEntityDescription(
        key="tare",
        translation_key="tare",
        press_fn=lambda scale: scale.tare(),
    ),
    AcaiaButtonEntityDescription(
        key="reset_timer",
        translation_key="reset_timer",
        press_fn=lambda scale: scale.reset_timer(),
    ),
    AcaiaButtonEntityDescription(
        key="start_stop",
        translation_key="start_stop",
        press_fn=lambda scale: scale.start_stop_timer(),
    ),
)


async def async_setup_entry(
    hass: HomeAssistant,
    entry: AcaiaConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up button entities and services."""

    coordinator = entry.runtime_data
    async_add_entities(AcaiaButton(coordinator, description) for description in BUTTONS)


class AcaiaButton(AcaiaEntity, ButtonEntity):
    """Representation of an Acaia button."""

    entity_description: AcaiaButtonEntityDescription

    async def async_press(self) -> None:
        """Handle the button press."""
        await self.entity_description.press_fn(self._scale)
@@ -1,149 +0,0 @@
"""Config flow for Acaia integration."""

import logging
from typing import Any

from aioacaia.exceptions import AcaiaDeviceNotFound, AcaiaError, AcaiaUnknownDevice
from aioacaia.helpers import is_new_scale
import voluptuous as vol

from homeassistant.components.bluetooth import (
    BluetoothServiceInfoBleak,
    async_discovered_service_info,
)
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_ADDRESS, CONF_NAME
from homeassistant.helpers.device_registry import format_mac
from homeassistant.helpers.selector import (
    SelectOptionDict,
    SelectSelector,
    SelectSelectorConfig,
    SelectSelectorMode,
)

from .const import CONF_IS_NEW_STYLE_SCALE, DOMAIN

_LOGGER = logging.getLogger(__name__)


class AcaiaConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for acaia."""

    def __init__(self) -> None:
        """Initialize the config flow."""
        self._discovered: dict[str, Any] = {}
        self._discovered_devices: dict[str, str] = {}

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle a flow initialized by the user."""

        errors: dict[str, str] = {}

        if user_input is not None:
            mac = format_mac(user_input[CONF_ADDRESS])
            try:
                is_new_style_scale = await is_new_scale(mac)
            except AcaiaDeviceNotFound:
                errors["base"] = "device_not_found"
            except AcaiaError:
                _LOGGER.exception("Error occurred while connecting to the scale")
                errors["base"] = "unknown"
            except AcaiaUnknownDevice:
                return self.async_abort(reason="unsupported_device")
            else:
                await self.async_set_unique_id(mac)
                self._abort_if_unique_id_configured()

            if not errors:
                return self.async_create_entry(
                    title=self._discovered_devices[user_input[CONF_ADDRESS]],
                    data={
                        CONF_ADDRESS: mac,
                        CONF_IS_NEW_STYLE_SCALE: is_new_style_scale,
                    },
                )

        for device in async_discovered_service_info(self.hass):
            self._discovered_devices[device.address] = device.name

        if not self._discovered_devices:
            return self.async_abort(reason="no_devices_found")

        options = [
            SelectOptionDict(
                value=device_mac,
                label=f"{device_name} ({device_mac})",
            )
            for device_mac, device_name in self._discovered_devices.items()
        ]

        return self.async_show_form(
            step_id="user",
            data_schema=vol.Schema(
                {
                    vol.Required(CONF_ADDRESS): SelectSelector(
                        SelectSelectorConfig(
                            options=options,
                            mode=SelectSelectorMode.DROPDOWN,
                        )
                    )
                }
            ),
            errors=errors,
        )

    async def async_step_bluetooth(
        self, discovery_info: BluetoothServiceInfoBleak
    ) -> ConfigFlowResult:
        """Handle a discovered Bluetooth device."""

        self._discovered[CONF_ADDRESS] = mac = format_mac(discovery_info.address)
        self._discovered[CONF_NAME] = discovery_info.name

        await self.async_set_unique_id(mac)
        self._abort_if_unique_id_configured()

        try:
            self._discovered[CONF_IS_NEW_STYLE_SCALE] = await is_new_scale(
                discovery_info.address
            )
        except AcaiaDeviceNotFound:
            _LOGGER.debug("Device not found during discovery")
            return self.async_abort(reason="device_not_found")
        except AcaiaError:
            _LOGGER.debug(
                "Error occurred while connecting to the scale during discovery",
                exc_info=True,
            )
            return self.async_abort(reason="unknown")
        except AcaiaUnknownDevice:
            _LOGGER.debug("Unsupported device during discovery")
            return self.async_abort(reason="unsupported_device")

        return await self.async_step_bluetooth_confirm()

    async def async_step_bluetooth_confirm(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle confirmation of Bluetooth discovery."""

        if user_input is not None:
            return self.async_create_entry(
                title=self._discovered[CONF_NAME],
                data={
                    CONF_ADDRESS: self._discovered[CONF_ADDRESS],
                    CONF_IS_NEW_STYLE_SCALE: self._discovered[CONF_IS_NEW_STYLE_SCALE],
                },
            )

        self.context["title_placeholders"] = placeholders = {
            CONF_NAME: self._discovered[CONF_NAME]
        }

        self._set_confirm_only()
        return self.async_show_form(
            step_id="bluetooth_confirm",
            description_placeholders=placeholders,
        )
@@ -1,4 +0,0 @@
"""Constants for component."""

DOMAIN = "acaia"
CONF_IS_NEW_STYLE_SCALE = "is_new_style_scale"
@ -1,86 +0,0 @@
|
|||
"""Coordinator for Acaia integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
from aioacaia.acaiascale import AcaiaScale
|
||||
from aioacaia.exceptions import AcaiaDeviceNotFound, AcaiaError
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_ADDRESS
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
|
||||
|
||||
from .const import CONF_IS_NEW_STYLE_SCALE
|
||||
|
||||
SCAN_INTERVAL = timedelta(seconds=15)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
type AcaiaConfigEntry = ConfigEntry[AcaiaCoordinator]
|
||||
|
||||
|
||||
class AcaiaCoordinator(DataUpdateCoordinator[None]):
|
||||
"""Class to handle fetching data from the scale."""
|
||||
|
||||
config_entry: AcaiaConfigEntry
|
||||
|
||||
def __init__(self, hass: HomeAssistant, entry: AcaiaConfigEntry) -> None:
|
||||
"""Initialize coordinator."""
|
||||
super().__init__(
|
||||
hass,
|
||||
_LOGGER,
|
||||
name="acaia coordinator",
|
||||
update_interval=SCAN_INTERVAL,
|
||||
config_entry=entry,
|
||||
)
|
||||
|
||||
self._scale = AcaiaScale(
|
||||
address_or_ble_device=entry.data[CONF_ADDRESS],
|
||||
name=entry.title,
|
||||
is_new_style_scale=entry.data[CONF_IS_NEW_STYLE_SCALE],
|
||||
notify_callback=self.async_update_listeners,
|
||||
)
|
||||
|
||||
@property
|
||||
def scale(self) -> AcaiaScale:
|
||||
"""Return the scale object."""
|
||||
return self._scale
|
||||
|
||||
async def _async_update_data(self) -> None:
|
||||
"""Fetch data."""
|
||||
|
||||
# scale is already connected, return
|
||||
if self._scale.connected:
|
||||
return
|
||||
|
||||
# scale is not connected, try to connect
|
||||
try:
|
||||
await self._scale.connect(setup_tasks=False)
|
||||
except (AcaiaDeviceNotFound, AcaiaError, TimeoutError) as ex:
|
||||
_LOGGER.debug(
|
||||
"Could not connect to scale: %s, Error: %s",
|
||||
self.config_entry.data[CONF_ADDRESS],
|
||||
ex,
|
||||
)
|
||||
self._scale.device_disconnected_handler(notify=False)
|
||||
return
|
||||
|
||||
# connected, set up background tasks
|
||||
if not self._scale.heartbeat_task or self._scale.heartbeat_task.done():
|
||||
self._scale.heartbeat_task = self.config_entry.async_create_background_task(
|
||||
hass=self.hass,
|
||||
target=self._scale.send_heartbeats(),
|
||||
name="acaia_heartbeat_task",
|
||||
)
|
||||
|
||||
if not self._scale.process_queue_task or self._scale.process_queue_task.done():
|
||||
self._scale.process_queue_task = (
|
||||
self.config_entry.async_create_background_task(
|
||||
hass=self.hass,
|
||||
target=self._scale.process_queue(),
|
||||
name="acaia_process_queue_task",
|
||||
)
|
||||
)
|
|
@ -1,40 +0,0 @@
|
|||
"""Base class for Acaia entities."""
|
||||
|
||||
from dataclasses import dataclass
|
||||
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity import EntityDescription
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import AcaiaCoordinator
|
||||
|
||||
|
||||
@dataclass
|
||||
class AcaiaEntity(CoordinatorEntity[AcaiaCoordinator]):
|
||||
"""Common elements for all entities."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: AcaiaCoordinator,
|
||||
entity_description: EntityDescription,
|
||||
) -> None:
|
||||
"""Initialize the entity."""
|
||||
super().__init__(coordinator)
|
||||
self.entity_description = entity_description
|
||||
self._scale = coordinator.scale
|
||||
self._attr_unique_id = f"{self._scale.mac}_{entity_description.key}"
|
||||
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, self._scale.mac)},
|
||||
manufacturer="Acaia",
|
||||
model=self._scale.model,
|
||||
suggested_area="Kitchen",
|
||||
)
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Returns whether entity is available."""
|
||||
return super().available and self._scale.connected
|
|
@ -1,15 +0,0 @@
|
|||
{
|
||||
"entity": {
|
||||
"button": {
|
||||
"tare": {
|
||||
"default": "mdi:scale-balance"
|
||||
},
|
||||
"reset_timer": {
|
||||
"default": "mdi:timer-refresh"
|
||||
},
|
||||
"start_stop": {
|
||||
"default": "mdi:timer-play"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,29 +0,0 @@
|
|||
{
|
||||
"domain": "acaia",
|
||||
"name": "Acaia",
|
||||
"bluetooth": [
|
||||
{
|
||||
"manufacturer_id": 16962
|
||||
},
|
||||
{
|
||||
"local_name": "ACAIA*"
|
||||
},
|
||||
{
|
||||
"local_name": "PYXIS-*"
|
||||
},
|
||||
{
|
||||
"local_name": "LUNAR-*"
|
||||
},
|
||||
{
|
||||
"local_name": "PROCHBT001"
|
||||
}
|
||||
],
|
||||
"codeowners": ["@zweckj"],
|
||||
"config_flow": true,
|
||||
"dependencies": ["bluetooth_adapters"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/acaia",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["aioacaia"],
|
||||
"requirements": ["aioacaia==0.1.6"]
|
||||
}
|
|
@ -1,38 +0,0 @@
|
|||
{
|
||||
"config": {
|
||||
"flow_title": "{name}",
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]",
|
||||
"unsupported_device": "This device is not supported."
|
||||
},
|
||||
"error": {
|
||||
"device_not_found": "Device could not be found.",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"step": {
|
||||
"bluetooth_confirm": {
|
||||
"description": "[%key:component::bluetooth::config::step::bluetooth_confirm::description%]"
|
||||
},
|
||||
"user": {
|
||||
"description": "[%key:component::bluetooth::config::step::user::description%]",
|
||||
"data": {
|
||||
"address": "[%key:common::config_flow::data::device%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"button": {
|
||||
"tare": {
|
||||
"name": "Tare"
|
||||
},
|
||||
"reset_timer": {
|
||||
"name": "Reset timer"
|
||||
},
|
||||
"start_stop": {
|
||||
"name": "Start/stop timer"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -2,11 +2,13 @@
|
|||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
|
||||
from accuweather import AccuWeather
|
||||
|
||||
from homeassistant.components.sensor import DOMAIN as SENSOR_PLATFORM
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_API_KEY, CONF_NAME, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
|
@ -14,9 +16,7 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
|||
|
||||
from .const import DOMAIN, UPDATE_INTERVAL_DAILY_FORECAST, UPDATE_INTERVAL_OBSERVATION
|
||||
from .coordinator import (
|
||||
AccuWeatherConfigEntry,
|
||||
AccuWeatherDailyForecastDataUpdateCoordinator,
|
||||
AccuWeatherData,
|
||||
AccuWeatherObservationDataUpdateCoordinator,
|
||||
)
|
||||
|
||||
|
@ -25,6 +25,17 @@ _LOGGER = logging.getLogger(__name__)
|
|||
PLATFORMS = [Platform.SENSOR, Platform.WEATHER]
|
||||
|
||||
|
||||
@dataclass
|
||||
class AccuWeatherData:
|
||||
"""Data for AccuWeather integration."""
|
||||
|
||||
coordinator_observation: AccuWeatherObservationDataUpdateCoordinator
|
||||
coordinator_daily_forecast: AccuWeatherDailyForecastDataUpdateCoordinator
|
||||
|
||||
|
||||
type AccuWeatherConfigEntry = ConfigEntry[AccuWeatherData]
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: AccuWeatherConfigEntry) -> bool:
|
||||
"""Set up AccuWeather as config entry."""
|
||||
api_key: str = entry.data[CONF_API_KEY]
|
||||
|
@ -39,7 +50,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: AccuWeatherConfigEntry)
|
|||
|
||||
coordinator_observation = AccuWeatherObservationDataUpdateCoordinator(
|
||||
hass,
|
||||
entry,
|
||||
accuweather,
|
||||
name,
|
||||
"observation",
|
||||
|
@ -48,7 +58,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: AccuWeatherConfigEntry)
|
|||
|
||||
coordinator_daily_forecast = AccuWeatherDailyForecastDataUpdateCoordinator(
|
||||
hass,
|
||||
entry,
|
||||
accuweather,
|
||||
name,
|
||||
"daily forecast",
|
||||
|
|
|
@ -1,9 +1,6 @@
|
|||
"""The AccuWeather coordinator."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from asyncio import timeout
|
||||
from dataclasses import dataclass
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
@ -11,7 +8,6 @@ from typing import TYPE_CHECKING, Any
|
|||
from accuweather import AccuWeather, ApiError, InvalidApiKeyError, RequestsExceededError
|
||||
from aiohttp.client_exceptions import ClientConnectorError
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
|
||||
from homeassistant.helpers.update_coordinator import (
|
||||
|
@ -27,17 +23,6 @@ EXCEPTIONS = (ApiError, ClientConnectorError, InvalidApiKeyError, RequestsExceed
|
|||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass
|
||||
class AccuWeatherData:
|
||||
"""Data for AccuWeather integration."""
|
||||
|
||||
coordinator_observation: AccuWeatherObservationDataUpdateCoordinator
|
||||
coordinator_daily_forecast: AccuWeatherDailyForecastDataUpdateCoordinator
|
||||
|
||||
|
||||
type AccuWeatherConfigEntry = ConfigEntry[AccuWeatherData]
|
||||
|
||||
|
||||
class AccuWeatherObservationDataUpdateCoordinator(
|
||||
DataUpdateCoordinator[dict[str, Any]]
|
||||
):
|
||||
|
@ -46,7 +31,6 @@ class AccuWeatherObservationDataUpdateCoordinator(
|
|||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
config_entry: AccuWeatherConfigEntry,
|
||||
accuweather: AccuWeather,
|
||||
name: str,
|
||||
coordinator_type: str,
|
||||
|
@ -64,7 +48,6 @@ class AccuWeatherObservationDataUpdateCoordinator(
|
|||
super().__init__(
|
||||
hass,
|
||||
_LOGGER,
|
||||
config_entry=config_entry,
|
||||
name=f"{name} ({coordinator_type})",
|
||||
update_interval=update_interval,
|
||||
)
|
||||
|
@ -90,7 +73,6 @@ class AccuWeatherDailyForecastDataUpdateCoordinator(
|
|||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
config_entry: AccuWeatherConfigEntry,
|
||||
accuweather: AccuWeather,
|
||||
name: str,
|
||||
coordinator_type: str,
|
||||
|
@ -108,7 +90,6 @@ class AccuWeatherDailyForecastDataUpdateCoordinator(
|
|||
super().__init__(
|
||||
hass,
|
||||
_LOGGER,
|
||||
config_entry=config_entry,
|
||||
name=f"{name} ({coordinator_type})",
|
||||
update_interval=update_interval,
|
||||
)
|
||||
|
|
|
@ -8,7 +8,7 @@ from homeassistant.components.diagnostics import async_redact_data
|
|||
from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .coordinator import AccuWeatherConfigEntry, AccuWeatherData
|
||||
from . import AccuWeatherConfigEntry, AccuWeatherData
|
||||
|
||||
TO_REDACT = {CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE}
|
||||
|
||||
|
|
|
@ -18,7 +18,6 @@ from homeassistant.const import (
|
|||
UV_INDEX,
|
||||
UnitOfIrradiance,
|
||||
UnitOfLength,
|
||||
UnitOfPressure,
|
||||
UnitOfSpeed,
|
||||
UnitOfTemperature,
|
||||
UnitOfTime,
|
||||
|
@ -28,6 +27,7 @@ from homeassistant.core import HomeAssistant, callback
|
|||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from . import AccuWeatherConfigEntry
|
||||
from .const import (
|
||||
API_METRIC,
|
||||
ATTR_CATEGORY,
|
||||
|
@ -40,7 +40,6 @@ from .const import (
|
|||
MAX_FORECAST_DAYS,
|
||||
)
|
||||
from .coordinator import (
|
||||
AccuWeatherConfigEntry,
|
||||
AccuWeatherDailyForecastDataUpdateCoordinator,
|
||||
AccuWeatherObservationDataUpdateCoordinator,
|
||||
)
|
||||
|
@ -280,15 +279,6 @@ SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
|||
value_fn=lambda data: cast(float, data[API_METRIC][ATTR_VALUE]),
|
||||
translation_key="realfeel_temperature_shade",
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
key="RelativeHumidity",
|
||||
device_class=SensorDeviceClass.HUMIDITY,
|
||||
entity_registry_enabled_default=False,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
value_fn=lambda data: cast(int, data),
|
||||
translation_key="humidity",
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
key="Precipitation",
|
||||
device_class=SensorDeviceClass.PRECIPITATION_INTENSITY,
|
||||
|
@ -298,16 +288,6 @@ SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
|||
attr_fn=lambda data: {"type": data["PrecipitationType"]},
|
||||
translation_key="precipitation",
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
key="Pressure",
|
||||
device_class=SensorDeviceClass.PRESSURE,
|
||||
entity_registry_enabled_default=False,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=0,
|
||||
native_unit_of_measurement=UnitOfPressure.HPA,
|
||||
value_fn=lambda data: cast(float, data[API_METRIC][ATTR_VALUE]),
|
||||
translation_key="pressure",
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
key="PressureTendency",
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
|
@ -315,19 +295,9 @@ SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
|||
value_fn=lambda data: cast(str, data["LocalizedText"]).lower(),
|
||||
translation_key="pressure_tendency",
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
key="Temperature",
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
entity_registry_enabled_default=False,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
value_fn=lambda data: cast(float, data[API_METRIC][ATTR_VALUE]),
|
||||
translation_key="temperature",
|
||||
),
|
||||
AccuWeatherSensorDescription(
|
||||
key="UVIndex",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_registry_enabled_default=False,
|
||||
native_unit_of_measurement=UV_INDEX,
|
||||
value_fn=lambda data: cast(int, data),
|
||||
attr_fn=lambda data: {ATTR_LEVEL: data["UVIndexText"]},
|
||||
|
@ -354,7 +324,6 @@ SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = (
|
|||
AccuWeatherSensorDescription(
|
||||
key="Wind",
|
||||
device_class=SensorDeviceClass.WIND_SPEED,
|
||||
entity_registry_enabled_default=False,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
native_unit_of_measurement=UnitOfSpeed.KILOMETERS_PER_HOUR,
|
||||
value_fn=lambda data: cast(float, data[ATTR_SPEED][API_METRIC][ATTR_VALUE]),
|
||||
|
|
|
@ -9,8 +9,8 @@ from accuweather.const import ENDPOINT
|
|||
from homeassistant.components import system_health
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
|
||||
from . import AccuWeatherConfigEntry
|
||||
from .const import DOMAIN
|
||||
from .coordinator import AccuWeatherConfigEntry
|
||||
|
||||
|
||||
@callback
|
||||
|
|
|
@ -33,6 +33,7 @@ from homeassistant.core import HomeAssistant, callback
|
|||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.util.dt import utc_from_timestamp
|
||||
|
||||
from . import AccuWeatherConfigEntry, AccuWeatherData
|
||||
from .const import (
|
||||
API_METRIC,
|
||||
ATTR_DIRECTION,
|
||||
|
@ -42,9 +43,7 @@ from .const import (
|
|||
CONDITION_MAP,
|
||||
)
|
||||
from .coordinator import (
|
||||
AccuWeatherConfigEntry,
|
||||
AccuWeatherDailyForecastDataUpdateCoordinator,
|
||||
AccuWeatherData,
|
||||
AccuWeatherObservationDataUpdateCoordinator,
|
||||
)
|
||||
|
||||
|
|
|
@ -3,7 +3,6 @@
|
|||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
import homeassistant.helpers.entity_registry as er
|
||||
|
||||
from .hub import PulseHub
|
||||
|
||||
|
@ -18,9 +17,6 @@ async def async_setup_entry(
|
|||
hass: HomeAssistant, config_entry: AcmedaConfigEntry
|
||||
) -> bool:
|
||||
"""Set up Rollease Acmeda Automate hub from a config entry."""
|
||||
|
||||
await _migrate_unique_ids(hass, config_entry)
|
||||
|
||||
hub = PulseHub(hass, config_entry)
|
||||
|
||||
if not await hub.async_setup():
|
||||
|
@ -32,19 +28,6 @@ async def async_setup_entry(
|
|||
return True
|
||||
|
||||
|
||||
async def _migrate_unique_ids(hass: HomeAssistant, entry: AcmedaConfigEntry) -> None:
|
||||
"""Migrate pre-config flow unique ids."""
|
||||
entity_registry = er.async_get(hass)
|
||||
registry_entries = er.async_entries_for_config_entry(
|
||||
entity_registry, entry.entry_id
|
||||
)
|
||||
for reg_entry in registry_entries:
|
||||
if isinstance(reg_entry.unique_id, int): # type: ignore[unreachable]
|
||||
entity_registry.async_update_entity( # type: ignore[unreachable]
|
||||
reg_entry.entity_id, new_unique_id=str(reg_entry.unique_id)
|
||||
)
|
||||
|
||||
|
||||
async def async_unload_entry(
|
||||
hass: HomeAssistant, config_entry: AcmedaConfigEntry
|
||||
) -> bool:
|
||||
|
|
|
@ -11,7 +11,7 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
|||
from .const import ACMEDA_ENTITY_REMOVE, DOMAIN, LOGGER
|
||||
|
||||
|
||||
class AcmedaEntity(entity.Entity):
|
||||
class AcmedaBase(entity.Entity):
|
||||
"""Base representation of an Acmeda roller."""
|
||||
|
||||
_attr_should_poll = False
|
||||
|
@ -67,7 +67,7 @@ class AcmedaEntity(entity.Entity):
|
|||
@property
|
||||
def unique_id(self) -> str:
|
||||
"""Return the unique ID of this roller."""
|
||||
return str(self.roller.id)
|
||||
return self.roller.id # type: ignore[no-any-return]
|
||||
|
||||
@property
|
||||
def device_id(self) -> str:
|
|
@ -14,8 +14,8 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
|||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
|
||||
from . import AcmedaConfigEntry
|
||||
from .base import AcmedaBase
|
||||
from .const import ACMEDA_HUB_UPDATE
|
||||
from .entity import AcmedaEntity
|
||||
from .helpers import async_add_acmeda_entities
|
||||
|
||||
|
||||
|
@ -44,7 +44,7 @@ async def async_setup_entry(
|
|||
)
|
||||
|
||||
|
||||
class AcmedaCover(AcmedaEntity, CoverEntity):
|
||||
class AcmedaCover(AcmedaBase, CoverEntity):
|
||||
"""Representation of an Acmeda cover device."""
|
||||
|
||||
_attr_name = None
|
||||
|
|
|
@ -6,5 +6,5 @@
|
|||
"documentation": "https://www.home-assistant.io/integrations/acmeda",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["aiopulse"],
|
||||
"requirements": ["aiopulse==0.4.6"]
|
||||
"requirements": ["aiopulse==0.4.4"]
|
||||
}
|
||||
|
|
|
@ -9,8 +9,8 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
|||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
|
||||
from . import AcmedaConfigEntry
|
||||
from .base import AcmedaBase
|
||||
from .const import ACMEDA_HUB_UPDATE
|
||||
from .entity import AcmedaEntity
|
||||
from .helpers import async_add_acmeda_entities
|
||||
|
||||
|
||||
|
@ -39,7 +39,7 @@ async def async_setup_entry(
|
|||
)
|
||||
|
||||
|
||||
class AcmedaBattery(AcmedaEntity, SensorEntity):
|
||||
class AcmedaBattery(AcmedaBase, SensorEntity):
|
||||
"""Representation of an Acmeda cover sensor."""
|
||||
|
||||
_attr_device_class = SensorDeviceClass.BATTERY
|
||||
|
|
|
@ -9,7 +9,7 @@ from typing import Final
|
|||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.device_tracker import (
|
||||
DOMAIN as DEVICE_TRACKER_DOMAIN,
|
||||
DOMAIN,
|
||||
PLATFORM_SCHEMA as DEVICE_TRACKER_PLATFORM_SCHEMA,
|
||||
DeviceScanner,
|
||||
)
|
||||
|
@ -36,7 +36,7 @@ def get_scanner(
|
|||
hass: HomeAssistant, config: ConfigType
|
||||
) -> ActiontecDeviceScanner | None:
|
||||
"""Validate the configuration and return an Actiontec scanner."""
|
||||
scanner = ActiontecDeviceScanner(config[DEVICE_TRACKER_DOMAIN])
|
||||
scanner = ActiontecDeviceScanner(config[DOMAIN])
|
||||
return scanner if scanner.success_init else None
|
||||
|
||||
|
||||
|
@ -51,6 +51,7 @@ class ActiontecDeviceScanner(DeviceScanner):
|
|||
self.last_results: list[Device] = []
|
||||
data = self.get_actiontec_data()
|
||||
self.success_init = data is not None
|
||||
_LOGGER.info("Scanner initialized")
|
||||
|
||||
def scan_devices(self) -> list[str]:
|
||||
"""Scan for new devices and return a list with found device IDs."""
|
||||
|
@ -69,7 +70,7 @@ class ActiontecDeviceScanner(DeviceScanner):
|
|||
|
||||
Return boolean if scanning successful.
|
||||
"""
|
||||
_LOGGER.debug("Scanning")
|
||||
_LOGGER.info("Scanning")
|
||||
if not self.success_init:
|
||||
return False
|
||||
|
||||
|
@ -78,7 +79,7 @@ class ActiontecDeviceScanner(DeviceScanner):
|
|||
self.last_results = [
|
||||
device for device in actiontec_data if device.timevalid > -60
|
||||
]
|
||||
_LOGGER.debug("Scan successful")
|
||||
_LOGGER.info("Scan successful")
|
||||
return True
|
||||
|
||||
def get_actiontec_data(self) -> list[Device] | None:
|
||||
|
|
|
@ -130,7 +130,7 @@ class AdaxConfigFlow(ConfigFlow, domain=DOMAIN):
|
|||
async_get_clientsession(self.hass), account_id, password
|
||||
)
|
||||
if token is None:
|
||||
_LOGGER.debug("Adax: Failed to login to retrieve token")
|
||||
_LOGGER.info("Adax: Failed to login to retrieve token")
|
||||
errors["base"] = "cannot_connect"
|
||||
return self.async_show_form(
|
||||
step_id="cloud",
|
||||
|
|
|
@ -7,6 +7,7 @@ from typing import Any
|
|||
from adguardhome import AdGuardHome, AdGuardHomeConnectionError
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.hassio import HassioServiceInfo
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import (
|
||||
CONF_HOST,
|
||||
|
@ -17,7 +18,6 @@ from homeassistant.const import (
|
|||
CONF_VERIFY_SSL,
|
||||
)
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.service_info.hassio import HassioServiceInfo
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
|
|
|
@ -66,20 +66,10 @@
|
|||
}
|
||||
},
|
||||
"services": {
|
||||
"add_url": {
|
||||
"service": "mdi:link-plus"
|
||||
},
|
||||
"remove_url": {
|
||||
"service": "mdi:link-off"
|
||||
},
|
||||
"enable_url": {
|
||||
"service": "mdi:link-variant"
|
||||
},
|
||||
"disable_url": {
|
||||
"service": "mdi:link-variant-off"
|
||||
},
|
||||
"refresh": {
|
||||
"service": "mdi:refresh"
|
||||
}
|
||||
"add_url": "mdi:link-plus",
|
||||
"remove_url": "mdi:link-off",
|
||||
"enable_url": "mdi:link-variant",
|
||||
"disable_url": "mdi:link-variant-off",
|
||||
"refresh": "mdi:refresh"
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,6 +1,12 @@
|
|||
"""Support for Automation Device Specification (ADS)."""
|
||||
|
||||
import asyncio
|
||||
from asyncio import timeout
|
||||
from collections import namedtuple
|
||||
import ctypes
|
||||
import logging
|
||||
import struct
|
||||
import threading
|
||||
|
||||
import pyads
|
||||
import voluptuous as vol
|
||||
|
@ -13,38 +19,42 @@ from homeassistant.const import (
|
|||
)
|
||||
from homeassistant.core import HomeAssistant, ServiceCall
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.entity import Entity
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import CONF_ADS_VAR, DATA_ADS, DOMAIN, AdsType
|
||||
from .hub import AdsHub
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DATA_ADS = "data_ads"
|
||||
|
||||
# Supported Types
|
||||
ADSTYPE_BOOL = "bool"
|
||||
ADSTYPE_BYTE = "byte"
|
||||
ADSTYPE_DINT = "dint"
|
||||
ADSTYPE_INT = "int"
|
||||
ADSTYPE_UDINT = "udint"
|
||||
ADSTYPE_UINT = "uint"
|
||||
|
||||
ADS_TYPEMAP = {
|
||||
AdsType.BOOL: pyads.PLCTYPE_BOOL,
|
||||
AdsType.BYTE: pyads.PLCTYPE_BYTE,
|
||||
AdsType.INT: pyads.PLCTYPE_INT,
|
||||
AdsType.UINT: pyads.PLCTYPE_UINT,
|
||||
AdsType.SINT: pyads.PLCTYPE_SINT,
|
||||
AdsType.USINT: pyads.PLCTYPE_USINT,
|
||||
AdsType.DINT: pyads.PLCTYPE_DINT,
|
||||
AdsType.UDINT: pyads.PLCTYPE_UDINT,
|
||||
AdsType.WORD: pyads.PLCTYPE_WORD,
|
||||
AdsType.DWORD: pyads.PLCTYPE_DWORD,
|
||||
AdsType.REAL: pyads.PLCTYPE_REAL,
|
||||
AdsType.LREAL: pyads.PLCTYPE_LREAL,
|
||||
AdsType.STRING: pyads.PLCTYPE_STRING,
|
||||
AdsType.TIME: pyads.PLCTYPE_TIME,
|
||||
AdsType.DATE: pyads.PLCTYPE_DATE,
|
||||
AdsType.DATE_AND_TIME: pyads.PLCTYPE_DT,
|
||||
AdsType.TOD: pyads.PLCTYPE_TOD,
|
||||
ADSTYPE_BOOL: pyads.PLCTYPE_BOOL,
|
||||
ADSTYPE_BYTE: pyads.PLCTYPE_BYTE,
|
||||
ADSTYPE_DINT: pyads.PLCTYPE_DINT,
|
||||
ADSTYPE_INT: pyads.PLCTYPE_INT,
|
||||
ADSTYPE_UDINT: pyads.PLCTYPE_UDINT,
|
||||
ADSTYPE_UINT: pyads.PLCTYPE_UINT,
|
||||
}
|
||||
|
||||
CONF_ADS_FACTOR = "factor"
|
||||
CONF_ADS_TYPE = "adstype"
|
||||
CONF_ADS_VALUE = "value"
|
||||
CONF_ADS_VAR = "adsvar"
|
||||
CONF_ADS_VAR_BRIGHTNESS = "adsvar_brightness"
|
||||
CONF_ADS_VAR_POSITION = "adsvar_position"
|
||||
|
||||
STATE_KEY_STATE = "state"
|
||||
STATE_KEY_BRIGHTNESS = "brightness"
|
||||
STATE_KEY_POSITION = "position"
|
||||
|
||||
DOMAIN = "ads"
|
||||
|
||||
SERVICE_WRITE_DATA_BY_NAME = "write_data_by_name"
|
||||
|
||||
|
@ -63,7 +73,16 @@ CONFIG_SCHEMA = vol.Schema(
|
|||
|
||||
SCHEMA_SERVICE_WRITE_DATA_BY_NAME = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_ADS_TYPE): vol.Coerce(AdsType),
|
||||
vol.Required(CONF_ADS_TYPE): vol.In(
|
||||
[
|
||||
ADSTYPE_INT,
|
||||
ADSTYPE_UINT,
|
||||
ADSTYPE_BYTE,
|
||||
ADSTYPE_BOOL,
|
||||
ADSTYPE_DINT,
|
||||
ADSTYPE_UDINT,
|
||||
]
|
||||
),
|
||||
vol.Required(CONF_ADS_VALUE): vol.Coerce(int),
|
||||
vol.Required(CONF_ADS_VAR): cv.string,
|
||||
}
|
||||
|
@ -97,9 +116,9 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
|||
|
||||
def handle_write_data_by_name(call: ServiceCall) -> None:
|
||||
"""Write a value to the connected ADS device."""
|
||||
ads_var: str = call.data[CONF_ADS_VAR]
|
||||
ads_type: AdsType = call.data[CONF_ADS_TYPE]
|
||||
value: int = call.data[CONF_ADS_VALUE]
|
||||
ads_var = call.data[CONF_ADS_VAR]
|
||||
ads_type = call.data[CONF_ADS_TYPE]
|
||||
value = call.data[CONF_ADS_VALUE]
|
||||
|
||||
try:
|
||||
ads.write_by_name(ads_var, value, ADS_TYPEMAP[ads_type])
|
||||
|
@ -114,3 +133,181 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
|||
)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
# Tuple to hold data needed for notification
|
||||
NotificationItem = namedtuple( # noqa: PYI024
|
||||
"NotificationItem", "hnotify huser name plc_datatype callback"
|
||||
)
|
||||
|
||||
|
||||
class AdsHub:
|
||||
"""Representation of an ADS connection."""
|
||||
|
||||
def __init__(self, ads_client):
|
||||
"""Initialize the ADS hub."""
|
||||
self._client = ads_client
|
||||
self._client.open()
|
||||
|
||||
# All ADS devices are registered here
|
||||
self._devices = []
|
||||
self._notification_items = {}
|
||||
self._lock = threading.Lock()
|
||||
|
||||
def shutdown(self, *args, **kwargs):
|
||||
"""Shutdown ADS connection."""
|
||||
|
||||
_LOGGER.debug("Shutting down ADS")
|
||||
for notification_item in self._notification_items.values():
|
||||
_LOGGER.debug(
|
||||
"Deleting device notification %d, %d",
|
||||
notification_item.hnotify,
|
||||
notification_item.huser,
|
||||
)
|
||||
try:
|
||||
self._client.del_device_notification(
|
||||
notification_item.hnotify, notification_item.huser
|
||||
)
|
||||
except pyads.ADSError as err:
|
||||
_LOGGER.error(err)
|
||||
try:
|
||||
self._client.close()
|
||||
except pyads.ADSError as err:
|
||||
_LOGGER.error(err)
|
||||
|
||||
def register_device(self, device):
|
||||
"""Register a new device."""
|
||||
self._devices.append(device)
|
||||
|
||||
def write_by_name(self, name, value, plc_datatype):
|
||||
"""Write a value to the device."""
|
||||
|
||||
with self._lock:
|
||||
try:
|
||||
return self._client.write_by_name(name, value, plc_datatype)
|
||||
except pyads.ADSError as err:
|
||||
_LOGGER.error("Error writing %s: %s", name, err)
|
||||
|
||||
def read_by_name(self, name, plc_datatype):
|
||||
"""Read a value from the device."""
|
||||
|
||||
with self._lock:
|
||||
try:
|
||||
return self._client.read_by_name(name, plc_datatype)
|
||||
except pyads.ADSError as err:
|
||||
_LOGGER.error("Error reading %s: %s", name, err)
|
||||
|
||||
def add_device_notification(self, name, plc_datatype, callback):
|
||||
"""Add a notification to the ADS devices."""
|
||||
|
||||
attr = pyads.NotificationAttrib(ctypes.sizeof(plc_datatype))
|
||||
|
||||
with self._lock:
|
||||
try:
|
||||
hnotify, huser = self._client.add_device_notification(
|
||||
name, attr, self._device_notification_callback
|
||||
)
|
||||
except pyads.ADSError as err:
|
||||
_LOGGER.error("Error subscribing to %s: %s", name, err)
|
||||
else:
|
||||
hnotify = int(hnotify)
|
||||
self._notification_items[hnotify] = NotificationItem(
|
||||
hnotify, huser, name, plc_datatype, callback
|
||||
)
|
||||
|
||||
_LOGGER.debug(
|
||||
"Added device notification %d for variable %s", hnotify, name
|
||||
)
|
||||
|
||||
def _device_notification_callback(self, notification, name):
|
||||
"""Handle device notifications."""
|
||||
contents = notification.contents
|
||||
|
||||
hnotify = int(contents.hNotification)
|
||||
_LOGGER.debug("Received notification %d", hnotify)
|
||||
|
||||
# get dynamically sized data array
|
||||
data_size = contents.cbSampleSize
|
||||
data = (ctypes.c_ubyte * data_size).from_address(
|
||||
ctypes.addressof(contents)
|
||||
+ pyads.structs.SAdsNotificationHeader.data.offset
|
||||
)
|
||||
|
||||
try:
|
||||
with self._lock:
|
||||
notification_item = self._notification_items[hnotify]
|
||||
except KeyError:
|
||||
_LOGGER.error("Unknown device notification handle: %d", hnotify)
|
||||
return
|
||||
|
||||
# Parse data to desired datatype
|
||||
if notification_item.plc_datatype == pyads.PLCTYPE_BOOL:
|
||||
value = bool(struct.unpack("<?", bytearray(data))[0])
|
||||
elif notification_item.plc_datatype == pyads.PLCTYPE_INT:
|
||||
value = struct.unpack("<h", bytearray(data))[0]
|
||||
elif notification_item.plc_datatype == pyads.PLCTYPE_BYTE:
|
||||
value = struct.unpack("<B", bytearray(data))[0]
|
||||
elif notification_item.plc_datatype == pyads.PLCTYPE_UINT:
|
||||
value = struct.unpack("<H", bytearray(data))[0]
|
||||
elif notification_item.plc_datatype == pyads.PLCTYPE_DINT:
|
||||
value = struct.unpack("<i", bytearray(data))[0]
|
||||
elif notification_item.plc_datatype == pyads.PLCTYPE_UDINT:
|
||||
value = struct.unpack("<I", bytearray(data))[0]
|
||||
else:
|
||||
value = bytearray(data)
|
||||
_LOGGER.warning("No callback available for this datatype")
|
||||
|
||||
notification_item.callback(notification_item.name, value)
|
||||
|
||||
|
||||
class AdsEntity(Entity):
|
||||
"""Representation of ADS entity."""
|
||||
|
||||
_attr_should_poll = False
|
||||
|
||||
def __init__(self, ads_hub, name, ads_var):
|
||||
"""Initialize ADS binary sensor."""
|
||||
self._state_dict = {}
|
||||
self._state_dict[STATE_KEY_STATE] = None
|
||||
self._ads_hub = ads_hub
|
||||
self._ads_var = ads_var
|
||||
self._event = None
|
||||
self._attr_unique_id = ads_var
|
||||
self._attr_name = name
|
||||
|
||||
async def async_initialize_device(
|
||||
self, ads_var, plctype, state_key=STATE_KEY_STATE, factor=None
|
||||
):
|
||||
"""Register device notification."""
|
||||
|
||||
def update(name, value):
|
||||
"""Handle device notifications."""
|
||||
_LOGGER.debug("Variable %s changed its value to %d", name, value)
|
||||
|
||||
if factor is None:
|
||||
self._state_dict[state_key] = value
|
||||
else:
|
||||
self._state_dict[state_key] = value / factor
|
||||
|
||||
asyncio.run_coroutine_threadsafe(async_event_set(), self.hass.loop)
|
||||
self.schedule_update_ha_state()
|
||||
|
||||
async def async_event_set():
|
||||
"""Set event in async context."""
|
||||
self._event.set()
|
||||
|
||||
self._event = asyncio.Event()
|
||||
|
||||
await self.hass.async_add_executor_job(
|
||||
self._ads_hub.add_device_notification, ads_var, plctype, update
|
||||
)
|
||||
try:
|
||||
async with timeout(10):
|
||||
await self._event.wait()
|
||||
except TimeoutError:
|
||||
_LOGGER.debug("Variable %s: Timeout during first update", ads_var)
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return False if state has not been updated yet."""
|
||||
return self._state_dict[STATE_KEY_STATE] is not None
|
||||
|
|
|
@ -17,9 +17,7 @@ import homeassistant.helpers.config_validation as cv
|
|||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from .const import CONF_ADS_VAR, DATA_ADS, STATE_KEY_STATE
|
||||
from .entity import AdsEntity
|
||||
from .hub import AdsHub
|
||||
from . import CONF_ADS_VAR, DATA_ADS, STATE_KEY_STATE, AdsEntity
|
||||
|
||||
DEFAULT_NAME = "ADS binary sensor"
|
||||
PLATFORM_SCHEMA = BINARY_SENSOR_PLATFORM_SCHEMA.extend(
|
||||
|
@ -38,11 +36,11 @@ def setup_platform(
|
|||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> None:
|
||||
"""Set up the Binary Sensor platform for ADS."""
|
||||
ads_hub = hass.data[DATA_ADS]
|
||||
ads_hub = hass.data.get(DATA_ADS)
|
||||
|
||||
ads_var: str = config[CONF_ADS_VAR]
|
||||
name: str = config[CONF_NAME]
|
||||
device_class: BinarySensorDeviceClass | None = config.get(CONF_DEVICE_CLASS)
|
||||
ads_var = config[CONF_ADS_VAR]
|
||||
name = config[CONF_NAME]
|
||||
device_class = config.get(CONF_DEVICE_CLASS)
|
||||
|
||||
ads_sensor = AdsBinarySensor(ads_hub, name, ads_var, device_class)
|
||||
add_entities([ads_sensor])
|
||||
|
@ -51,13 +49,7 @@ def setup_platform(
|
|||
class AdsBinarySensor(AdsEntity, BinarySensorEntity):
|
||||
"""Representation of ADS binary sensors."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
ads_hub: AdsHub,
|
||||
name: str,
|
||||
ads_var: str,
|
||||
device_class: BinarySensorDeviceClass | None,
|
||||
) -> None:
|
||||
def __init__(self, ads_hub, name, ads_var, device_class):
|
||||
"""Initialize ADS binary sensor."""
|
||||
super().__init__(ads_hub, name, ads_var)
|
||||
self._attr_device_class = device_class or BinarySensorDeviceClass.MOVING
|
||||
|
|
|
@ -1,41 +0,0 @@
|
|||
"""Support for Automation Device Specification (ADS)."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from enum import StrEnum
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .hub import AdsHub
|
||||
|
||||
DOMAIN = "ads"
|
||||
|
||||
DATA_ADS: HassKey[AdsHub] = HassKey(DOMAIN)
|
||||
|
||||
CONF_ADS_VAR = "adsvar"
|
||||
|
||||
STATE_KEY_STATE = "state"
|
||||
|
||||
|
||||
class AdsType(StrEnum):
|
||||
"""Supported Types."""
|
||||
|
||||
BOOL = "bool"
|
||||
BYTE = "byte"
|
||||
INT = "int"
|
||||
UINT = "uint"
|
||||
SINT = "sint"
|
||||
USINT = "usint"
|
||||
DINT = "dint"
|
||||
UDINT = "udint"
|
||||
WORD = "word"
|
||||
DWORD = "dword"
|
||||
LREAL = "lreal"
|
||||
REAL = "real"
|
||||
STRING = "string"
|
||||
TIME = "time"
|
||||
DATE = "date"
|
||||
DATE_AND_TIME = "dt"
|
||||
TOD = "tod"
|
|
@ -11,7 +11,6 @@ from homeassistant.components.cover import (
|
|||
ATTR_POSITION,
|
||||
DEVICE_CLASSES_SCHEMA,
|
||||
PLATFORM_SCHEMA as COVER_PLATFORM_SCHEMA,
|
||||
CoverDeviceClass,
|
||||
CoverEntity,
|
||||
CoverEntityFeature,
|
||||
)
|
||||
|
@ -21,9 +20,14 @@ import homeassistant.helpers.config_validation as cv
|
|||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from .const import CONF_ADS_VAR, DATA_ADS, STATE_KEY_STATE
|
||||
from .entity import AdsEntity
|
||||
from .hub import AdsHub
|
||||
from . import (
|
||||
CONF_ADS_VAR,
|
||||
CONF_ADS_VAR_POSITION,
|
||||
DATA_ADS,
|
||||
STATE_KEY_POSITION,
|
||||
STATE_KEY_STATE,
|
||||
AdsEntity,
|
||||
)
|
||||
|
||||
DEFAULT_NAME = "ADS Cover"
|
||||
|
||||
|
@ -31,9 +35,6 @@ CONF_ADS_VAR_SET_POS = "adsvar_set_position"
|
|||
CONF_ADS_VAR_OPEN = "adsvar_open"
|
||||
CONF_ADS_VAR_CLOSE = "adsvar_close"
|
||||
CONF_ADS_VAR_STOP = "adsvar_stop"
|
||||
CONF_ADS_VAR_POSITION = "adsvar_position"
|
||||
|
||||
STATE_KEY_POSITION = "position"
|
||||
|
||||
PLATFORM_SCHEMA = COVER_PLATFORM_SCHEMA.extend(
|
||||
{
|
||||
|
@ -58,14 +59,14 @@ def setup_platform(
|
|||
"""Set up the cover platform for ADS."""
|
||||
ads_hub = hass.data[DATA_ADS]
|
||||
|
||||
ads_var_is_closed: str = config[CONF_ADS_VAR]
|
||||
ads_var_position: str | None = config.get(CONF_ADS_VAR_POSITION)
|
||||
ads_var_pos_set: str | None = config.get(CONF_ADS_VAR_SET_POS)
|
||||
ads_var_open: str | None = config.get(CONF_ADS_VAR_OPEN)
|
||||
ads_var_close: str | None = config.get(CONF_ADS_VAR_CLOSE)
|
||||
ads_var_stop: str | None = config.get(CONF_ADS_VAR_STOP)
|
||||
name: str = config[CONF_NAME]
|
||||
device_class: CoverDeviceClass | None = config.get(CONF_DEVICE_CLASS)
|
||||
ads_var_is_closed = config.get(CONF_ADS_VAR)
|
||||
ads_var_position = config.get(CONF_ADS_VAR_POSITION)
|
||||
ads_var_pos_set = config.get(CONF_ADS_VAR_SET_POS)
|
||||
ads_var_open = config.get(CONF_ADS_VAR_OPEN)
|
||||
ads_var_close = config.get(CONF_ADS_VAR_CLOSE)
|
||||
ads_var_stop = config.get(CONF_ADS_VAR_STOP)
|
||||
name = config[CONF_NAME]
|
||||
device_class = config.get(CONF_DEVICE_CLASS)
|
||||
|
||||
add_entities(
|
||||
[
|
||||
|
@ -89,16 +90,16 @@ class AdsCover(AdsEntity, CoverEntity):
|
|||
|
||||
def __init__(
|
||||
self,
|
||||
ads_hub: AdsHub,
|
||||
ads_var_is_closed: str,
|
||||
ads_var_position: str | None,
|
||||
ads_var_pos_set: str | None,
|
||||
ads_var_open: str | None,
|
||||
ads_var_close: str | None,
|
||||
ads_var_stop: str | None,
|
||||
name: str,
|
||||
device_class: CoverDeviceClass | None,
|
||||
) -> None:
|
||||
ads_hub,
|
||||
ads_var_is_closed,
|
||||
ads_var_position,
|
||||
ads_var_pos_set,
|
||||
ads_var_open,
|
||||
ads_var_close,
|
||||
ads_var_stop,
|
||||
name,
|
||||
device_class,
|
||||
):
|
||||
"""Initialize AdsCover entity."""
|
||||
super().__init__(ads_hub, name, ads_var_is_closed)
|
||||
if self._attr_unique_id is None:
|
||||
|
|
|
@ -1,70 +0,0 @@
|
|||
"""Support for Automation Device Specification (ADS)."""
|
||||
|
||||
import asyncio
|
||||
from asyncio import timeout
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.helpers.entity import Entity
|
||||
|
||||
from .const import STATE_KEY_STATE
|
||||
from .hub import AdsHub
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class AdsEntity(Entity):
|
||||
"""Representation of ADS entity."""
|
||||
|
||||
_attr_should_poll = False
|
||||
|
||||
def __init__(self, ads_hub: AdsHub, name: str, ads_var: str) -> None:
|
||||
"""Initialize ADS binary sensor."""
|
||||
self._state_dict: dict[str, Any] = {}
|
||||
self._state_dict[STATE_KEY_STATE] = None
|
||||
self._ads_hub = ads_hub
|
||||
self._ads_var = ads_var
|
||||
self._event: asyncio.Event | None = None
|
||||
self._attr_unique_id = ads_var
|
||||
self._attr_name = name
|
||||
|
||||
async def async_initialize_device(
|
||||
self,
|
||||
ads_var: str,
|
||||
plctype: type,
|
||||
state_key: str = STATE_KEY_STATE,
|
||||
factor: int | None = None,
|
||||
) -> None:
|
||||
"""Register device notification."""
|
||||
|
||||
def update(name, value):
|
||||
"""Handle device notifications."""
|
||||
_LOGGER.debug("Variable %s changed its value to %d", name, value)
|
||||
|
||||
if factor is None:
|
||||
self._state_dict[state_key] = value
|
||||
else:
|
||||
self._state_dict[state_key] = value / factor
|
||||
|
||||
asyncio.run_coroutine_threadsafe(async_event_set(), self.hass.loop)
|
||||
self.schedule_update_ha_state()
|
||||
|
||||
async def async_event_set():
|
||||
"""Set event in async context."""
|
||||
self._event.set()
|
||||
|
||||
self._event = asyncio.Event()
|
||||
|
||||
await self.hass.async_add_executor_job(
|
||||
self._ads_hub.add_device_notification, ads_var, plctype, update
|
||||
)
|
||||
try:
|
||||
async with timeout(10):
|
||||
await self._event.wait()
|
||||
except TimeoutError:
|
||||
_LOGGER.debug("Variable %s: Timeout during first update", ads_var)
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return False if state has not been updated yet."""
|
||||
return self._state_dict[STATE_KEY_STATE] is not None
|
|
@ -1,151 +0,0 @@
|
|||
"""Support for Automation Device Specification (ADS)."""
|
||||
|
||||
from collections import namedtuple
|
||||
import ctypes
|
||||
import logging
|
||||
import struct
|
||||
import threading
|
||||
|
||||
import pyads
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
# Tuple to hold data needed for notification
|
||||
NotificationItem = namedtuple( # noqa: PYI024
|
||||
"NotificationItem", "hnotify huser name plc_datatype callback"
|
||||
)
|
||||
|
||||
|
||||
class AdsHub:
|
||||
"""Representation of an ADS connection."""
|
||||
|
||||
def __init__(self, ads_client):
|
||||
"""Initialize the ADS hub."""
|
||||
self._client = ads_client
|
||||
self._client.open()
|
||||
|
||||
# All ADS devices are registered here
|
||||
self._devices = []
|
||||
self._notification_items = {}
|
||||
self._lock = threading.Lock()
|
||||
|
||||
def shutdown(self, *args, **kwargs):
|
||||
"""Shutdown ADS connection."""
|
||||
|
||||
_LOGGER.debug("Shutting down ADS")
|
||||
for notification_item in self._notification_items.values():
|
||||
_LOGGER.debug(
|
||||
"Deleting device notification %d, %d",
|
||||
notification_item.hnotify,
|
||||
notification_item.huser,
|
||||
)
|
||||
try:
|
||||
self._client.del_device_notification(
|
||||
notification_item.hnotify, notification_item.huser
|
||||
)
|
||||
except pyads.ADSError as err:
|
||||
_LOGGER.error(err)
|
||||
try:
|
||||
self._client.close()
|
||||
except pyads.ADSError as err:
|
||||
_LOGGER.error(err)
|
||||
|
||||
def register_device(self, device):
|
||||
"""Register a new device."""
|
||||
self._devices.append(device)
|
||||
|
||||
def write_by_name(self, name, value, plc_datatype):
|
||||
"""Write a value to the device."""
|
||||
|
||||
with self._lock:
|
||||
try:
|
||||
return self._client.write_by_name(name, value, plc_datatype)
|
||||
except pyads.ADSError as err:
|
||||
_LOGGER.error("Error writing %s: %s", name, err)
|
||||
|
||||
def read_by_name(self, name, plc_datatype):
|
||||
"""Read a value from the device."""
|
||||
|
||||
with self._lock:
|
||||
try:
|
||||
return self._client.read_by_name(name, plc_datatype)
|
||||
except pyads.ADSError as err:
|
||||
_LOGGER.error("Error reading %s: %s", name, err)
|
||||
|
||||
def add_device_notification(self, name, plc_datatype, callback):
|
||||
"""Add a notification to the ADS devices."""
|
||||
|
||||
attr = pyads.NotificationAttrib(ctypes.sizeof(plc_datatype))
|
||||
|
||||
with self._lock:
|
||||
try:
|
||||
hnotify, huser = self._client.add_device_notification(
|
||||
name, attr, self._device_notification_callback
|
||||
)
|
||||
except pyads.ADSError as err:
|
||||
_LOGGER.error("Error subscribing to %s: %s", name, err)
|
||||
else:
|
||||
hnotify = int(hnotify)
|
||||
self._notification_items[hnotify] = NotificationItem(
|
||||
hnotify, huser, name, plc_datatype, callback
|
||||
)
|
||||
|
||||
_LOGGER.debug(
|
||||
"Added device notification %d for variable %s", hnotify, name
|
||||
)
|
||||
|
||||
def _device_notification_callback(self, notification, name):
|
||||
"""Handle device notifications."""
|
||||
contents = notification.contents
|
||||
hnotify = int(contents.hNotification)
|
||||
_LOGGER.debug("Received notification %d", hnotify)
|
||||
|
||||
# Get dynamically sized data array
|
||||
data_size = contents.cbSampleSize
|
||||
data_address = (
|
||||
ctypes.addressof(contents)
|
||||
+ pyads.structs.SAdsNotificationHeader.data.offset
|
||||
)
|
||||
data = (ctypes.c_ubyte * data_size).from_address(data_address)
|
||||
|
||||
# Acquire notification item
|
||||
with self._lock:
|
||||
notification_item = self._notification_items.get(hnotify)
|
||||
|
||||
if not notification_item:
|
||||
_LOGGER.error("Unknown device notification handle: %d", hnotify)
|
||||
return
|
||||
|
||||
# Data parsing based on PLC data type
|
||||
plc_datatype = notification_item.plc_datatype
|
||||
unpack_formats = {
|
||||
pyads.PLCTYPE_BYTE: "<b",
|
||||
pyads.PLCTYPE_INT: "<h",
|
||||
pyads.PLCTYPE_UINT: "<H",
|
||||
pyads.PLCTYPE_SINT: "<b",
|
||||
pyads.PLCTYPE_USINT: "<B",
|
||||
pyads.PLCTYPE_DINT: "<i",
|
||||
pyads.PLCTYPE_UDINT: "<I",
|
||||
pyads.PLCTYPE_WORD: "<H",
|
||||
pyads.PLCTYPE_DWORD: "<I",
|
||||
pyads.PLCTYPE_LREAL: "<d",
|
||||
pyads.PLCTYPE_REAL: "<f",
|
||||
pyads.PLCTYPE_TOD: "<i", # Treat as DINT
|
||||
pyads.PLCTYPE_DATE: "<i", # Treat as DINT
|
||||
pyads.PLCTYPE_DT: "<i", # Treat as DINT
|
||||
pyads.PLCTYPE_TIME: "<i", # Treat as DINT
|
||||
}
|
||||
|
||||
if plc_datatype == pyads.PLCTYPE_BOOL:
|
||||
value = bool(struct.unpack("<?", bytearray(data))[0])
|
||||
elif plc_datatype == pyads.PLCTYPE_STRING:
|
||||
value = (
|
||||
bytearray(data).split(b"\x00", 1)[0].decode("utf-8", errors="ignore")
|
||||
)
|
||||
elif plc_datatype in unpack_formats:
|
||||
value = struct.unpack(unpack_formats[plc_datatype], bytearray(data))[0]
|
||||
else:
|
||||
value = bytearray(data)
|
||||
_LOGGER.warning("No callback available for this datatype")
|
||||
|
||||
notification_item.callback(notification_item.name, value)
|
|
@ -1,7 +1,5 @@
|
|||
{
|
||||
"services": {
|
||||
"write_data_by_name": {
|
||||
"service": "mdi:pencil"
|
||||
}
|
||||
"write_data_by_name": "mdi:pencil"
|
||||
}
|
||||
}
|
||||
|
|
|
@ -19,12 +19,14 @@ import homeassistant.helpers.config_validation as cv
|
|||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from .const import CONF_ADS_VAR, DATA_ADS, STATE_KEY_STATE
|
||||
from .entity import AdsEntity
|
||||
from .hub import AdsHub
|
||||
|
||||
CONF_ADS_VAR_BRIGHTNESS = "adsvar_brightness"
|
||||
STATE_KEY_BRIGHTNESS = "brightness"
|
||||
from . import (
|
||||
CONF_ADS_VAR,
|
||||
CONF_ADS_VAR_BRIGHTNESS,
|
||||
DATA_ADS,
|
||||
STATE_KEY_BRIGHTNESS,
|
||||
STATE_KEY_STATE,
|
||||
AdsEntity,
|
||||
)
|
||||
|
||||
DEFAULT_NAME = "ADS Light"
|
||||
PLATFORM_SCHEMA = LIGHT_PLATFORM_SCHEMA.extend(
|
||||
|
@ -43,11 +45,11 @@ def setup_platform(
|
|||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> None:
|
||||
"""Set up the light platform for ADS."""
|
||||
ads_hub = hass.data[DATA_ADS]
|
||||
ads_hub = hass.data.get(DATA_ADS)
|
||||
|
||||
ads_var_enable: str = config[CONF_ADS_VAR]
|
||||
ads_var_brightness: str | None = config.get(CONF_ADS_VAR_BRIGHTNESS)
|
||||
name: str = config[CONF_NAME]
|
||||
ads_var_enable = config[CONF_ADS_VAR]
|
||||
ads_var_brightness = config.get(CONF_ADS_VAR_BRIGHTNESS)
|
||||
name = config[CONF_NAME]
|
||||
|
||||
add_entities([AdsLight(ads_hub, ads_var_enable, ads_var_brightness, name)])
|
||||
|
||||
|
@ -55,13 +57,7 @@ def setup_platform(
|
|||
class AdsLight(AdsEntity, LightEntity):
|
||||
"""Representation of ADS light."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
ads_hub: AdsHub,
|
||||
ads_var_enable: str,
|
||||
ads_var_brightness: str | None,
|
||||
name: str,
|
||||
) -> None:
|
||||
def __init__(self, ads_hub, ads_var_enable, ads_var_brightness, name):
|
||||
"""Initialize AdsLight entity."""
|
||||
super().__init__(ads_hub, name, ads_var_enable)
|
||||
self._state_dict[STATE_KEY_BRIGHTNESS] = None
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
{
|
||||
"domain": "ads",
|
||||
"name": "ADS",
|
||||
"codeowners": ["@mrpasztoradam"],
|
||||
"codeowners": [],
|
||||
"documentation": "https://www.home-assistant.io/integrations/ads",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["pyads"],
|
||||
|
|
|
@ -1,86 +0,0 @@
|
|||
"""Support for ADS select entities."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import pyads
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.select import (
|
||||
PLATFORM_SCHEMA as SELECT_PLATFORM_SCHEMA,
|
||||
SelectEntity,
|
||||
)
|
||||
from homeassistant.const import CONF_NAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from .const import CONF_ADS_VAR, DATA_ADS
|
||||
from .entity import AdsEntity
|
||||
from .hub import AdsHub
|
||||
|
||||
DEFAULT_NAME = "ADS select"
|
||||
|
||||
CONF_OPTIONS = "options"
|
||||
|
||||
PLATFORM_SCHEMA = SELECT_PLATFORM_SCHEMA.extend(
|
||||
{
|
||||
vol.Required(CONF_ADS_VAR): cv.string,
|
||||
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
|
||||
vol.Required(CONF_OPTIONS): vol.All(cv.ensure_list, [cv.string]),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def setup_platform(
|
||||
hass: HomeAssistant,
|
||||
config: ConfigType,
|
||||
add_entities: AddEntitiesCallback,
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> None:
|
||||
"""Set up an ADS select device."""
|
||||
ads_hub = hass.data[DATA_ADS]
|
||||
|
||||
ads_var: str = config[CONF_ADS_VAR]
|
||||
name: str = config[CONF_NAME]
|
||||
options: list[str] = config[CONF_OPTIONS]
|
||||
|
||||
entity = AdsSelect(ads_hub, ads_var, name, options)
|
||||
|
||||
add_entities([entity])
|
||||
|
||||
|
||||
class AdsSelect(AdsEntity, SelectEntity):
|
||||
"""Representation of an ADS select entity."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
ads_hub: AdsHub,
|
||||
ads_var: str,
|
||||
name: str,
|
||||
options: list[str],
|
||||
) -> None:
|
||||
"""Initialize the AdsSelect entity."""
|
||||
super().__init__(ads_hub, name, ads_var)
|
||||
self._attr_options = options
|
||||
self._attr_current_option = None
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Register device notification."""
|
||||
await self.async_initialize_device(self._ads_var, pyads.PLCTYPE_INT)
|
||||
self._ads_hub.add_device_notification(
|
||||
self._ads_var, pyads.PLCTYPE_INT, self._handle_ads_value
|
||||
)
|
||||
|
||||
def select_option(self, option: str) -> None:
|
||||
"""Change the selected option."""
|
||||
if option in self._attr_options:
|
||||
index = self._attr_options.index(option)
|
||||
self._ads_hub.write_by_name(self._ads_var, index, pyads.PLCTYPE_INT)
|
||||
self._attr_current_option = option
|
||||
|
||||
def _handle_ads_value(self, name: str, value: int) -> None:
|
||||
"""Handle the value update from ADS."""
|
||||
if 0 <= value < len(self._attr_options):
|
||||
self._attr_current_option = self._attr_options[value]
|
||||
self.schedule_update_ha_state()
|
|
@ -5,54 +5,41 @@ from __future__ import annotations
|
|||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
CONF_STATE_CLASS,
|
||||
DEVICE_CLASSES_SCHEMA as SENSOR_DEVICE_CLASSES_SCHEMA,
|
||||
PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA,
|
||||
STATE_CLASSES_SCHEMA as SENSOR_STATE_CLASSES_SCHEMA,
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.const import CONF_DEVICE_CLASS, CONF_NAME, CONF_UNIT_OF_MEASUREMENT
|
||||
from homeassistant.const import CONF_NAME, CONF_UNIT_OF_MEASUREMENT
|
||||
from homeassistant.core import HomeAssistant
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType, StateType
|
||||
|
||||
from . import ADS_TYPEMAP, CONF_ADS_FACTOR, CONF_ADS_TYPE
|
||||
from .const import CONF_ADS_VAR, DATA_ADS, STATE_KEY_STATE, AdsType
|
||||
from .entity import AdsEntity
|
||||
from .hub import AdsHub
|
||||
from .. import ads
|
||||
from . import (
|
||||
ADS_TYPEMAP,
|
||||
CONF_ADS_FACTOR,
|
||||
CONF_ADS_TYPE,
|
||||
CONF_ADS_VAR,
|
||||
STATE_KEY_STATE,
|
||||
AdsEntity,
|
||||
)
|
||||
|
||||
DEFAULT_NAME = "ADS sensor"
|
||||
|
||||
PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend(
|
||||
{
|
||||
vol.Required(CONF_ADS_VAR): cv.string,
|
||||
vol.Optional(CONF_ADS_FACTOR): cv.positive_int,
|
||||
vol.Optional(CONF_ADS_TYPE, default=AdsType.INT): vol.All(
|
||||
vol.Coerce(AdsType),
|
||||
vol.In(
|
||||
[
|
||||
AdsType.BOOL,
|
||||
AdsType.BYTE,
|
||||
AdsType.INT,
|
||||
AdsType.UINT,
|
||||
AdsType.SINT,
|
||||
AdsType.USINT,
|
||||
AdsType.DINT,
|
||||
AdsType.UDINT,
|
||||
AdsType.WORD,
|
||||
AdsType.DWORD,
|
||||
AdsType.LREAL,
|
||||
AdsType.REAL,
|
||||
]
|
||||
),
|
||||
vol.Optional(CONF_ADS_TYPE, default=ads.ADSTYPE_INT): vol.In(
|
||||
[
|
||||
ads.ADSTYPE_INT,
|
||||
ads.ADSTYPE_UINT,
|
||||
ads.ADSTYPE_BYTE,
|
||||
ads.ADSTYPE_DINT,
|
||||
ads.ADSTYPE_UDINT,
|
||||
]
|
||||
),
|
||||
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
|
||||
vol.Optional(CONF_DEVICE_CLASS): SENSOR_DEVICE_CLASSES_SCHEMA,
|
||||
vol.Optional(CONF_STATE_CLASS): SENSOR_STATE_CLASSES_SCHEMA,
|
||||
vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string,
|
||||
vol.Optional(CONF_UNIT_OF_MEASUREMENT, default=""): cv.string,
|
||||
}
|
||||
)
|
||||
|
||||
|
@ -64,26 +51,15 @@ def setup_platform(
|
|||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> None:
|
||||
"""Set up an ADS sensor device."""
|
||||
ads_hub = hass.data[DATA_ADS]
|
||||
ads_hub = hass.data.get(ads.DATA_ADS)
|
||||
|
||||
ads_var: str = config[CONF_ADS_VAR]
|
||||
ads_type: AdsType = config[CONF_ADS_TYPE]
|
||||
name: str = config[CONF_NAME]
|
||||
factor: int | None = config.get(CONF_ADS_FACTOR)
|
||||
device_class: SensorDeviceClass | None = config.get(CONF_DEVICE_CLASS)
|
||||
state_class: SensorStateClass | None = config.get(CONF_STATE_CLASS)
|
||||
unit_of_measurement: str | None = config.get(CONF_UNIT_OF_MEASUREMENT)
|
||||
ads_var = config[CONF_ADS_VAR]
|
||||
ads_type = config[CONF_ADS_TYPE]
|
||||
name = config[CONF_NAME]
|
||||
unit_of_measurement = config.get(CONF_UNIT_OF_MEASUREMENT)
|
||||
factor = config.get(CONF_ADS_FACTOR)
|
||||
|
||||
entity = AdsSensor(
|
||||
ads_hub,
|
||||
ads_var,
|
||||
ads_type,
|
||||
name,
|
||||
factor,
|
||||
device_class,
|
||||
state_class,
|
||||
unit_of_measurement,
|
||||
)
|
||||
entity = AdsSensor(ads_hub, ads_var, ads_type, name, unit_of_measurement, factor)
|
||||
|
||||
add_entities([entity])
|
||||
|
||||
|
@ -91,24 +67,12 @@ def setup_platform(
|
|||
class AdsSensor(AdsEntity, SensorEntity):
|
||||
"""Representation of an ADS sensor entity."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
ads_hub: AdsHub,
|
||||
ads_var: str,
|
||||
ads_type: AdsType,
|
||||
name: str,
|
||||
factor: int | None,
|
||||
device_class: SensorDeviceClass | None,
|
||||
state_class: SensorStateClass | None,
|
||||
unit_of_measurement: str | None,
|
||||
) -> None:
|
||||
def __init__(self, ads_hub, ads_var, ads_type, name, unit_of_measurement, factor):
|
||||
"""Initialize AdsSensor entity."""
|
||||
super().__init__(ads_hub, name, ads_var)
|
||||
self._attr_native_unit_of_measurement = unit_of_measurement
|
||||
self._ads_type = ads_type
|
||||
self._factor = factor
|
||||
self._attr_device_class = device_class
|
||||
self._attr_state_class = state_class
|
||||
self._attr_native_unit_of_measurement = unit_of_measurement
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Register device notification."""
|
||||
|
|
|
@@ -17,8 +17,7 @@ import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

from .const import CONF_ADS_VAR, DATA_ADS, STATE_KEY_STATE
from .entity import AdsEntity
from . import CONF_ADS_VAR, DATA_ADS, STATE_KEY_STATE, AdsEntity

DEFAULT_NAME = "ADS Switch"
@@ -37,10 +36,10 @@ def setup_platform(
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up switch platform for ADS."""
    ads_hub = hass.data[DATA_ADS]
    ads_hub = hass.data.get(DATA_ADS)

    name: str = config[CONF_NAME]
    ads_var: str = config[CONF_ADS_VAR]
    name = config[CONF_NAME]
    ads_var = config[CONF_ADS_VAR]

    add_entities([AdsSwitch(ads_hub, name, ads_var)])
@@ -1,84 +0,0 @@
"""Support for ADS valves."""

from __future__ import annotations

import pyads
import voluptuous as vol

from homeassistant.components.valve import (
    DEVICE_CLASSES_SCHEMA as VALVE_DEVICE_CLASSES_SCHEMA,
    PLATFORM_SCHEMA as VALVE_PLATFORM_SCHEMA,
    ValveDeviceClass,
    ValveEntity,
    ValveEntityFeature,
)
from homeassistant.const import CONF_DEVICE_CLASS, CONF_NAME
from homeassistant.core import HomeAssistant
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

from .const import CONF_ADS_VAR, DATA_ADS
from .entity import AdsEntity
from .hub import AdsHub

DEFAULT_NAME = "ADS valve"

PLATFORM_SCHEMA = VALVE_PLATFORM_SCHEMA.extend(
    {
        vol.Required(CONF_ADS_VAR): cv.string,
        vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
        vol.Optional(CONF_DEVICE_CLASS): VALVE_DEVICE_CLASSES_SCHEMA,
    }
)


def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up an ADS valve device."""
    ads_hub = hass.data[DATA_ADS]

    ads_var: str = config[CONF_ADS_VAR]
    name: str = config[CONF_NAME]
    device_class: ValveDeviceClass | None = config.get(CONF_DEVICE_CLASS)

    entity = AdsValve(ads_hub, ads_var, name, device_class)

    add_entities([entity])


class AdsValve(AdsEntity, ValveEntity):
    """Representation of an ADS valve entity."""

    _attr_supported_features = ValveEntityFeature.OPEN | ValveEntityFeature.CLOSE

    def __init__(
        self,
        ads_hub: AdsHub,
        ads_var: str,
        name: str,
        device_class: ValveDeviceClass | None,
    ) -> None:
        """Initialize AdsValve entity."""
        super().__init__(ads_hub, name, ads_var)
        self._attr_device_class = device_class
        self._attr_reports_position = False
        self._attr_is_closed = True

    async def async_added_to_hass(self) -> None:
        """Register device notification."""
        await self.async_initialize_device(self._ads_var, pyads.PLCTYPE_BOOL)

    def open_valve(self, **kwargs) -> None:
        """Open the valve."""
        self._ads_hub.write_by_name(self._ads_var, True, pyads.PLCTYPE_BOOL)
        self._attr_is_closed = False

    def close_valve(self, **kwargs) -> None:
        """Close the valve."""
        self._ads_hub.write_by_name(self._ads_var, False, pyads.PLCTYPE_BOOL)
        self._attr_is_closed = True
@@ -55,7 +55,6 @@ async def async_setup_entry(
    coordinator = DataUpdateCoordinator(
        hass,
        _LOGGER,
        config_entry=entry,
        name="Advantage Air",
        update_method=async_get,
        update_interval=timedelta(seconds=ADVANTAGE_AIR_SYNC_INTERVAL),
@@ -1,7 +1,5 @@
{
  "services": {
    "set_time_to": {
      "service": "mdi:timer-cog"
    }
    "set_time_to": "mdi:timer-cog"
  }
}
@@ -1,5 +1,6 @@
"""The AEMET OpenData component."""

from dataclasses import dataclass
import logging

from aemet_opendata.exceptions import AemetError, TownNotFound
@@ -12,10 +13,20 @@ from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import aiohttp_client

from .const import CONF_STATION_UPDATES, PLATFORMS
from .coordinator import AemetConfigEntry, AemetData, WeatherUpdateCoordinator
from .coordinator import WeatherUpdateCoordinator

_LOGGER = logging.getLogger(__name__)

type AemetConfigEntry = ConfigEntry[AemetData]


@dataclass
class AemetData:
    """Aemet runtime data."""

    name: str
    coordinator: WeatherUpdateCoordinator


async def async_setup_entry(hass: HomeAssistant, entry: AemetConfigEntry) -> bool:
    """Set up AEMET OpenData as config entry."""
@@ -35,7 +46,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AemetConfigEntry) -> boo
    except AemetError as err:
        raise ConfigEntryNotReady(err) from err

    weather_coordinator = WeatherUpdateCoordinator(hass, entry, aemet)
    weather_coordinator = WeatherUpdateCoordinator(hass, aemet)
    await weather_coordinator.async_config_entry_first_refresh()

    entry.runtime_data = AemetData(name=name, coordinator=weather_coordinator)
@@ -3,7 +3,6 @@
from __future__ import annotations

from asyncio import timeout
from dataclasses import dataclass
from datetime import timedelta
import logging
from typing import Any, Final, cast
@@ -20,7 +19,6 @@ from aemet_opendata.helpers import dict_nested_value
from aemet_opendata.interface import AEMET

from homeassistant.components.weather import Forecast
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

@@ -31,16 +29,6 @@ _LOGGER = logging.getLogger(__name__)
API_TIMEOUT: Final[int] = 120
WEATHER_UPDATE_INTERVAL = timedelta(minutes=10)

type AemetConfigEntry = ConfigEntry[AemetData]


@dataclass
class AemetData:
    """Aemet runtime data."""

    name: str
    coordinator: WeatherUpdateCoordinator


class WeatherUpdateCoordinator(DataUpdateCoordinator):
    """Weather data update coordinator."""
@@ -48,7 +36,6 @@ class WeatherUpdateCoordinator(DataUpdateCoordinator):
    def __init__(
        self,
        hass: HomeAssistant,
        entry: AemetConfigEntry,
        aemet: AEMET,
    ) -> None:
        """Initialize coordinator."""
@@ -57,7 +44,6 @@ class WeatherUpdateCoordinator(DataUpdateCoordinator):
        super().__init__(
            hass,
            _LOGGER,
            config_entry=entry,
            name=DOMAIN,
            update_interval=WEATHER_UPDATE_INTERVAL,
        )
@@ -6,7 +6,7 @@ from typing import Any

from aemet_opendata.const import AOD_COORDS

from homeassistant.components.diagnostics import async_redact_data
from homeassistant.components.diagnostics.util import async_redact_data
from homeassistant.const import (
    CONF_API_KEY,
    CONF_LATITUDE,
@@ -15,7 +15,7 @@ from homeassistant.const import (
)
from homeassistant.core import HomeAssistant

from .coordinator import AemetConfigEntry
from . import AemetConfigEntry

TO_REDACT_CONFIG = [
    CONF_API_KEY,
@@ -55,6 +55,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.util import dt as dt_util

from . import AemetConfigEntry
from .const import (
    ATTR_API_CONDITION,
    ATTR_API_FORECAST_CONDITION,
@@ -86,7 +87,7 @@ from .const import (
    ATTR_API_WIND_SPEED,
    CONDITIONS_MAP,
)
from .coordinator import AemetConfigEntry, WeatherUpdateCoordinator
from .coordinator import WeatherUpdateCoordinator
from .entity import AemetEntity


@@ -248,7 +249,6 @@ WEATHER_SENSORS: Final[tuple[AemetSensorEntityDescription, ...]] = (
        name="Rain",
        native_unit_of_measurement=UnitOfVolumetricFlux.MILLIMETERS_PER_HOUR,
        device_class=SensorDeviceClass.PRECIPITATION_INTENSITY,
        state_class=SensorStateClass.MEASUREMENT,
    ),
    AemetSensorEntityDescription(
        key=ATTR_API_RAIN_PROB,
@@ -263,7 +263,6 @@ WEATHER_SENSORS: Final[tuple[AemetSensorEntityDescription, ...]] = (
        name="Snow",
        native_unit_of_measurement=UnitOfVolumetricFlux.MILLIMETERS_PER_HOUR,
        device_class=SensorDeviceClass.PRECIPITATION_INTENSITY,
        state_class=SensorStateClass.MEASUREMENT,
    ),
    AemetSensorEntityDescription(
        key=ATTR_API_SNOW_PROB,
@@ -27,8 +27,9 @@ from homeassistant.const import (
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from . import AemetConfigEntry
from .const import CONDITIONS_MAP
from .coordinator import AemetConfigEntry, WeatherUpdateCoordinator
from .coordinator import WeatherUpdateCoordinator
from .entity import AemetEntity
@@ -7,11 +7,7 @@
    }
  },
  "services": {
    "add_tracking": {
      "service": "mdi:package-variant-plus"
    },
    "remove_tracking": {
      "service": "mdi:package-variant-minus"
    }
    "add_tracking": "mdi:package-variant-plus",
    "remove_tracking": "mdi:package-variant-minus"
  }
}
@@ -5,7 +5,12 @@ from __future__ import annotations
from homeassistant.components.alarm_control_panel import (
    AlarmControlPanelEntity,
    AlarmControlPanelEntityFeature,
    AlarmControlPanelState,
)
from homeassistant.const import (
    STATE_ALARM_ARMED_AWAY,
    STATE_ALARM_ARMED_HOME,
    STATE_ALARM_ARMED_NIGHT,
    STATE_ALARM_DISARMED,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceInfo
@@ -60,37 +65,37 @@ class AgentBaseStation(AlarmControlPanelEntity):
        self._attr_available = self._client.is_available
        armed = self._client.is_armed
        if armed is None:
            self._attr_alarm_state = None
            self._attr_state = None
            return
        if armed:
            prof = (await self._client.get_active_profile()).lower()
            self._attr_alarm_state = AlarmControlPanelState.ARMED_AWAY
            self._attr_state = STATE_ALARM_ARMED_AWAY
            if prof == CONF_HOME_MODE_NAME:
                self._attr_alarm_state = AlarmControlPanelState.ARMED_HOME
                self._attr_state = STATE_ALARM_ARMED_HOME
            elif prof == CONF_NIGHT_MODE_NAME:
                self._attr_alarm_state = AlarmControlPanelState.ARMED_NIGHT
                self._attr_state = STATE_ALARM_ARMED_NIGHT
        else:
            self._attr_alarm_state = AlarmControlPanelState.DISARMED
            self._attr_state = STATE_ALARM_DISARMED

    async def async_alarm_disarm(self, code: str | None = None) -> None:
        """Send disarm command."""
        await self._client.disarm()
        self._attr_alarm_state = AlarmControlPanelState.DISARMED
        self._attr_state = STATE_ALARM_DISARMED

    async def async_alarm_arm_away(self, code: str | None = None) -> None:
        """Send arm away command. Uses custom mode."""
        await self._client.arm()
        await self._client.set_active_profile(CONF_AWAY_MODE_NAME)
        self._attr_alarm_state = AlarmControlPanelState.ARMED_AWAY
        self._attr_state = STATE_ALARM_ARMED_AWAY

    async def async_alarm_arm_home(self, code: str | None = None) -> None:
        """Send arm home command. Uses custom mode."""
        await self._client.arm()
        await self._client.set_active_profile(CONF_HOME_MODE_NAME)
        self._attr_alarm_state = AlarmControlPanelState.ARMED_HOME
        self._attr_state = STATE_ALARM_ARMED_HOME

    async def async_alarm_arm_night(self, code: str | None = None) -> None:
        """Send arm night command. Uses custom mode."""
        await self._client.arm()
        await self._client.set_active_profile(CONF_NIGHT_MODE_NAME)
        self._attr_alarm_state = AlarmControlPanelState.ARMED_NIGHT
        self._attr_state = STATE_ALARM_ARMED_NIGHT
@@ -1,19 +1,9 @@
{
  "services": {
    "start_recording": {
      "service": "mdi:record-rec"
    },
    "stop_recording": {
      "service": "mdi:stop"
    },
    "enable_alerts": {
      "service": "mdi:bell-alert"
    },
    "disable_alerts": {
      "service": "mdi:bell-off"
    },
    "snapshot": {
      "service": "mdi:camera"
    }
    "start_recording": "mdi:record-rec",
    "stop_recording": "mdi:stop",
    "enable_alerts": "mdi:bell-alert",
    "disable_alerts": "mdi:bell-off",
    "snapshot": "mdi:camera"
  }
}
@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/agent_dvr",
  "iot_class": "local_polling",
  "loggers": ["agent"],
  "requirements": ["agent-py==0.0.24"]
  "requirements": ["agent-py==0.0.23"]
}
@@ -13,13 +13,11 @@ from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.typing import ConfigType, StateType
from homeassistant.util.hass_dict import HassKey

from .const import DOMAIN

_LOGGER: Final = logging.getLogger(__name__)

DATA_COMPONENT: HassKey[EntityComponent[AirQualityEntity]] = HassKey(DOMAIN)
ENTITY_ID_FORMAT: Final = DOMAIN + ".{}"
PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA
PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE
@@ -56,7 +54,7 @@ PROP_TO_ATTR: Final[dict[str, str]] = {

async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the air quality component."""
    component = hass.data[DATA_COMPONENT] = EntityComponent[AirQualityEntity](
    component = hass.data[DOMAIN] = EntityComponent[AirQualityEntity](
        _LOGGER, DOMAIN, hass, SCAN_INTERVAL
    )
    await component.async_setup(config)
@@ -65,12 +63,14 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:

async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Set up a config entry."""
    return await hass.data[DATA_COMPONENT].async_setup_entry(entry)
    component: EntityComponent[AirQualityEntity] = hass.data[DOMAIN]
    return await component.async_setup_entry(entry)


async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Unload a config entry."""
    return await hass.data[DATA_COMPONENT].async_unload_entry(entry)
    component: EntityComponent[AirQualityEntity] = hass.data[DOMAIN]
    return await component.async_unload_entry(entry)


class AirQualityEntity(Entity):
@@ -2,14 +2,18 @@

from __future__ import annotations

from airgradient import AirGradientClient
from dataclasses import dataclass

from airgradient import AirGradientClient, get_model_name

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .coordinator import AirGradientCoordinator
from .const import DOMAIN
from .coordinator import AirGradientConfigCoordinator, AirGradientMeasurementCoordinator

PLATFORMS: list[Platform] = [
    Platform.BUTTON,
@@ -21,7 +25,15 @@ PLATFORMS: list[Platform] = [
]


type AirGradientConfigEntry = ConfigEntry[AirGradientCoordinator]
@dataclass
class AirGradientData:
    """AirGradient data class."""

    measurement: AirGradientMeasurementCoordinator
    config: AirGradientConfigCoordinator


type AirGradientConfigEntry = ConfigEntry[AirGradientData]


async def async_setup_entry(hass: HomeAssistant, entry: AirGradientConfigEntry) -> bool:
@@ -31,11 +43,27 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirGradientConfigEntry)
        entry.data[CONF_HOST], session=async_get_clientsession(hass)
    )

    coordinator = AirGradientCoordinator(hass, client)
    measurement_coordinator = AirGradientMeasurementCoordinator(hass, client)
    config_coordinator = AirGradientConfigCoordinator(hass, client)

    await coordinator.async_config_entry_first_refresh()
    await measurement_coordinator.async_config_entry_first_refresh()
    await config_coordinator.async_config_entry_first_refresh()

    entry.runtime_data = coordinator
    device_registry = dr.async_get(hass)
    device_registry.async_get_or_create(
        config_entry_id=entry.entry_id,
        identifiers={(DOMAIN, measurement_coordinator.serial_number)},
        manufacturer="AirGradient",
        model=get_model_name(measurement_coordinator.data.model),
        model_id=measurement_coordinator.data.model,
        serial_number=measurement_coordinator.data.serial_number,
        sw_version=measurement_coordinator.data.firmware_version,
    )

    entry.runtime_data = AirGradientData(
        measurement=measurement_coordinator,
        config=config_coordinator,
    )

    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
@@ -15,9 +15,8 @@ from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from . import AirGradientConfigEntry
from .const import DOMAIN
from .coordinator import AirGradientCoordinator
from . import DOMAIN, AirGradientConfigEntry
from .coordinator import AirGradientConfigCoordinator
from .entity import AirGradientEntity


@@ -48,8 +47,8 @@ async def async_setup_entry(
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up AirGradient button entities based on a config entry."""
    coordinator = entry.runtime_data
    model = coordinator.data.measures.model
    model = entry.runtime_data.measurement.data.model
    coordinator = entry.runtime_data.config

    added_entities = False

@@ -58,7 +57,7 @@ async def async_setup_entry(
        nonlocal added_entities

        if (
            coordinator.data.config.configuration_control is ConfigurationControl.LOCAL
            coordinator.data.configuration_control is ConfigurationControl.LOCAL
            and not added_entities
        ):
            entities = [AirGradientButton(coordinator, CO2_CALIBRATION)]
@@ -68,8 +67,7 @@ async def async_setup_entry(
            async_add_entities(entities)
            added_entities = True
        elif (
            coordinator.data.config.configuration_control
            is not ConfigurationControl.LOCAL
            coordinator.data.configuration_control is not ConfigurationControl.LOCAL
            and added_entities
        ):
            entity_registry = er.async_get(hass)
@@ -89,10 +87,11 @@ class AirGradientButton(AirGradientEntity, ButtonEntity):
    """Defines an AirGradient button."""

    entity_description: AirGradientButtonEntityDescription
    coordinator: AirGradientConfigCoordinator

    def __init__(
        self,
        coordinator: AirGradientCoordinator,
        coordinator: AirGradientConfigCoordinator,
        description: AirGradientButtonEntityDescription,
    ) -> None:
        """Initialize airgradient button."""
@@ -2,35 +2,24 @@

from __future__ import annotations

from dataclasses import dataclass
from datetime import timedelta
from typing import TYPE_CHECKING

from airgradient import AirGradientClient, AirGradientError, Config, Measures

from homeassistant.core import HomeAssistant
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import DOMAIN, LOGGER
from .const import LOGGER

if TYPE_CHECKING:
    from . import AirGradientConfigEntry


@dataclass
class AirGradientData:
    """Class for AirGradient data."""

    measures: Measures
    config: Config


class AirGradientCoordinator(DataUpdateCoordinator[AirGradientData]):
class AirGradientCoordinator[_DataT](DataUpdateCoordinator[_DataT]):
    """Class to manage fetching AirGradient data."""

    config_entry: AirGradientConfigEntry
    _current_version: str

    def __init__(self, hass: HomeAssistant, client: AirGradientClient) -> None:
        """Initialize coordinator."""
@@ -44,27 +33,25 @@ class AirGradientCoordinator(DataUpdateCoordinator[AirGradientData]):
        assert self.config_entry.unique_id
        self.serial_number = self.config_entry.unique_id

    async def _async_setup(self) -> None:
        """Set up the coordinator."""
        self._current_version = (
            await self.client.get_current_measures()
        ).firmware_version

    async def _async_update_data(self) -> AirGradientData:
    async def _async_update_data(self) -> _DataT:
        try:
            measures = await self.client.get_current_measures()
            config = await self.client.get_config()
            return await self._update_data()
        except AirGradientError as error:
            raise UpdateFailed(error) from error
        if measures.firmware_version != self._current_version:
            device_registry = dr.async_get(self.hass)
            device_entry = device_registry.async_get_device(
                identifiers={(DOMAIN, self.serial_number)}
            )
            assert device_entry
            device_registry.async_update_device(
                device_entry.id,
                sw_version=measures.firmware_version,
            )
            self._current_version = measures.firmware_version
        return AirGradientData(measures, config)

    async def _update_data(self) -> _DataT:
        raise NotImplementedError


class AirGradientMeasurementCoordinator(AirGradientCoordinator[Measures]):
    """Class to manage fetching AirGradient data."""

    async def _update_data(self) -> Measures:
        return await self.client.get_current_measures()


class AirGradientConfigCoordinator(AirGradientCoordinator[Config]):
    """Class to manage fetching AirGradient data."""

    async def _update_data(self) -> Config:
        return await self.client.get_config()
@@ -1,18 +0,0 @@
"""Diagnostics support for Airgradient."""

from __future__ import annotations

from dataclasses import asdict
from typing import Any

from homeassistant.core import HomeAssistant

from . import AirGradientConfigEntry


async def async_get_config_entry_diagnostics(
    hass: HomeAssistant, entry: AirGradientConfigEntry
) -> dict[str, Any]:
    """Return diagnostics for a config entry."""

    return asdict(entry.runtime_data.data)
@@ -1,7 +1,5 @@
"""Base class for AirGradient entities."""

from airgradient import get_model_name

from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.update_coordinator import CoordinatorEntity

@@ -17,12 +15,6 @@ class AirGradientEntity(CoordinatorEntity[AirGradientCoordinator]):
    def __init__(self, coordinator: AirGradientCoordinator) -> None:
        """Initialize airgradient entity."""
        super().__init__(coordinator)
        measures = coordinator.data.measures
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, coordinator.serial_number)},
            manufacturer="AirGradient",
            model=get_model_name(measures.model),
            model_id=measures.model,
            serial_number=coordinator.serial_number,
            sw_version=measures.firmware_version,
        )
@@ -6,6 +6,6 @@
  "documentation": "https://www.home-assistant.io/integrations/airgradient",
  "integration_type": "device",
  "iot_class": "local_polling",
  "requirements": ["airgradient==0.9.1"],
  "requirements": ["airgradient==0.8.0"],
  "zeroconf": ["_airgradient._tcp.local."]
}
@@ -18,7 +18,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback

from . import AirGradientConfigEntry
from .const import DOMAIN
from .coordinator import AirGradientCoordinator
from .coordinator import AirGradientConfigCoordinator
from .entity import AirGradientEntity


@@ -62,8 +62,8 @@ async def async_setup_entry(
) -> None:
    """Set up AirGradient number entities based on a config entry."""

    coordinator = entry.runtime_data
    model = coordinator.data.measures.model
    model = entry.runtime_data.measurement.data.model
    coordinator = entry.runtime_data.config

    added_entities = False

@@ -72,7 +72,7 @@ async def async_setup_entry(
        nonlocal added_entities

        if (
            coordinator.data.config.configuration_control is ConfigurationControl.LOCAL
            coordinator.data.configuration_control is ConfigurationControl.LOCAL
            and not added_entities
        ):
            entities = []
@@ -84,8 +84,7 @@ async def async_setup_entry(
            async_add_entities(entities)
            added_entities = True
        elif (
            coordinator.data.config.configuration_control
            is not ConfigurationControl.LOCAL
            coordinator.data.configuration_control is not ConfigurationControl.LOCAL
            and added_entities
        ):
            entity_registry = er.async_get(hass)
@@ -105,10 +104,11 @@ class AirGradientNumber(AirGradientEntity, NumberEntity):
    """Defines an AirGradient number entity."""

    entity_description: AirGradientNumberEntityDescription
    coordinator: AirGradientConfigCoordinator

    def __init__(
        self,
        coordinator: AirGradientCoordinator,
        coordinator: AirGradientConfigCoordinator,
        description: AirGradientNumberEntityDescription,
    ) -> None:
        """Initialize AirGradient number."""
@@ -119,7 +119,7 @@ class AirGradientNumber(AirGradientEntity, NumberEntity):
    @property
    def native_value(self) -> int | None:
        """Return the state of the number."""
        return self.entity_description.value_fn(self.coordinator.data.config)
        return self.entity_description.value_fn(self.coordinator.data)

    async def async_set_native_value(self, value: float) -> None:
        """Set the selected value."""
@@ -18,7 +18,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback

from . import AirGradientConfigEntry
from .const import DOMAIN, PM_STANDARD, PM_STANDARD_REVERSE
from .coordinator import AirGradientCoordinator
from .coordinator import AirGradientConfigCoordinator
from .entity import AirGradientEntity


@@ -144,11 +144,13 @@ async def async_setup_entry(
) -> None:
    """Set up AirGradient select entities based on a config entry."""

    coordinator = entry.runtime_data
    model = coordinator.data.measures.model
    coordinator = entry.runtime_data.config
    measurement_coordinator = entry.runtime_data.measurement

    async_add_entities([AirGradientSelect(coordinator, CONFIG_CONTROL_ENTITY)])

    model = measurement_coordinator.data.model

    added_entities = False

    @callback
@@ -156,7 +158,7 @@ async def async_setup_entry(
        nonlocal added_entities

        if (
            coordinator.data.config.configuration_control is ConfigurationControl.LOCAL
            coordinator.data.configuration_control is ConfigurationControl.LOCAL
            and not added_entities
        ):
            entities: list[AirGradientSelect] = [
@@ -177,8 +179,7 @@ async def async_setup_entry(
            async_add_entities(entities)
            added_entities = True
        elif (
            coordinator.data.config.configuration_control
            is not ConfigurationControl.LOCAL
            coordinator.data.configuration_control is not ConfigurationControl.LOCAL
            and added_entities
        ):
            entity_registry = er.async_get(hass)
@@ -200,10 +201,11 @@ class AirGradientSelect(AirGradientEntity, SelectEntity):
    """Defines an AirGradient select entity."""

    entity_description: AirGradientSelectEntityDescription
    coordinator: AirGradientConfigCoordinator

    def __init__(
        self,
        coordinator: AirGradientCoordinator,
        coordinator: AirGradientConfigCoordinator,
        description: AirGradientSelectEntityDescription,
    ) -> None:
        """Initialize AirGradient select."""
@@ -214,7 +216,7 @@ class AirGradientSelect(AirGradientEntity, SelectEntity):
    @property
    def current_option(self) -> str | None:
        """Return the state of the select."""
        return self.entity_description.value_fn(self.coordinator.data.config)
        return self.entity_description.value_fn(self.coordinator.data)

    async def async_select_option(self, option: str) -> None:
        """Change the selected option."""
Some files were not shown because too many files have changed in this diff.